{
  "pretrained_model_name_or_path": "SG161222/Realistic_Vision_V5.1_noVAE",
  "pretrained_vae_name_or_path": null,
  "revision": "main",
  "tokenizer_name": null,
  "instance_data_dir": null,
  "class_data_dir": null,
  "instance_prompt": null,
  "class_prompt": null,
  "save_sample_prompt": "photo of myLora person",
  "save_sample_negative_prompt": null,
  "n_save_sample": 4,
  "save_guidance_scale": 7.5,
  "save_infer_steps": 20,
  "pad_tokens": false,
  "with_prior_preservation": true,
  "prior_loss_weight": 1.0,
  "num_class_images": 50,
  "output_dir": "drive/MyDrive/stable_diffusion_weights/myLora",
  "seed": 1337,
  "resolution": 512,
  "center_crop": false,
  "train_text_encoder": true,
  "train_batch_size": 1,
  "sample_batch_size": 4,
  "num_train_epochs": 40,
  "max_train_steps": 2000,
  "gradient_accumulation_steps": 1,
  "gradient_checkpointing": false,
  "learning_rate": 1e-06,
  "scale_lr": false,
  "lr_scheduler": "constant",
  "lr_warmup_steps": 0,
  "use_8bit_adam": true,
  "adam_beta1": 0.9,
  "adam_beta2": 0.999,
  "adam_weight_decay": 0.01,
  "adam_epsilon": 1e-08,
  "max_grad_norm": 1.0,
  "push_to_hub": false,
  "hub_token": null,
  "hub_model_id": null,
  "logging_dir": "logs",
  "log_interval": 10,
  "save_interval": 10000,
  "save_min_steps": 0,
  "mixed_precision": "no",
  "not_cache_latents": false,
  "hflip": false,
  "local_rank": -1,
  "concepts_list": [
    {
      "instance_prompt": "photo of myLora person",
      "class_prompt": "photo of a person",
      "instance_data_dir": "/content/data/myLora",
      "class_data_dir": "/content/data/person"
    }
  ],
  "read_prompts_from_txts": false
}