LoraByTanger / char / Mobius_梅比乌斯 / 莫比乌斯-re(v3)配置文件.json
{
"pretrained_model_name_or_path": "E:/stable-diffusion-webui_23-01-20/models/Stable-diffusion/anything-v4.5-pruned.safetensors",
"v2": false,
"v_parameterization": false,
"logging_dir": "",
"train_data_dir": "E:/\u56fe\u50cf\u5904\u7406/mobius-re",
"reg_data_dir": "",
"output_dir": "C:/Users/Administrator/Desktop/lora/lora/\u83ab\u6bd4\u4e4c\u65af-re(v3)",
"max_resolution": "960,960",
"learning_rate": "1.3e-4",
"lr_scheduler": "constant_with_warmup",
"lr_warmup": "7",
"train_batch_size": 2,
"epoch": "20",
"save_every_n_epochs": "1",
"mixed_precision": "fp16",
"save_precision": "fp16",
"seed": "100861123",
"num_cpu_threads_per_process": 8,
"cache_latents": true,
"caption_extension": "",
"enable_bucket": true,
"gradient_checkpointing": false,
"full_fp16": false,
"no_token_padding": false,
"stop_text_encoder_training": 0,
"use_8bit_adam": false,
"xformers": true,
"save_model_as": "safetensors",
"shuffle_caption": true,
"save_state": false,
"resume": "",
"prior_loss_weight": 1.0,
"text_encoder_lr": "1e-4",
"unet_lr": "1.6e-4",
"network_dim": 192,
"lora_network_weights": "",
"color_aug": false,
"flip_aug": false,
"clip_skip": 2,
"gradient_accumulation_steps": 1.0,
"mem_eff_attn": false,
"output_name": "(v3)mobius(mbor,mbac,mbsw,mbothers)",
"model_list": "custom",
"max_token_length": "75",
"max_train_epochs": "",
"max_data_loader_n_workers": "",
"network_alpha": 128,
"training_comment": "",
"keep_tokens": 3,
"lr_scheduler_num_cycles": "",
"lr_scheduler_power": "",
"persistent_data_loader_workers": true,
"bucket_no_upscale": true,
"random_crop": false,
"bucket_reso_steps": 64.0,
"caption_dropout_every_n_epochs": 0.0,
"caption_dropout_rate": 0,
"optimizer": "AdamW",
"optimizer_args": "",
"noise_offset": "",
"LoRA_type": "Standard",
"conv_dim": 1,
"conv_alpha": 1
}
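For reference, the following is a minimal Python sketch (not part of the original upload) of how a config like this one could be translated into an invocation of the kohya-ss sd-scripts train_network.py trainer, which the kohya_ss GUI drives internally. The flag mapping covers only a subset of the fields above, and CONFIG_PATH is a hypothetical local filename; adjust both to your setup.

import json
import shlex

# Hypothetical local path to the JSON config shown above.
CONFIG_PATH = "莫比乌斯-re(v3)配置文件.json"

with open(CONFIG_PATH, encoding="utf-8") as f:
    cfg = json.load(f)

# Approximate mapping of GUI fields to train_network.py flags
# (the GUI performs this conversion itself when launching training).
args = [
    "accelerate", "launch", "train_network.py",
    "--network_module=networks.lora",
    f"--pretrained_model_name_or_path={cfg['pretrained_model_name_or_path']}",
    f"--train_data_dir={cfg['train_data_dir']}",
    f"--output_dir={cfg['output_dir']}",
    f"--output_name={cfg['output_name']}",
    f"--resolution={cfg['max_resolution']}",
    f"--train_batch_size={cfg['train_batch_size']}",
    f"--max_train_epochs={cfg['epoch']}",
    f"--learning_rate={cfg['learning_rate']}",
    f"--unet_lr={cfg['unet_lr']}",
    f"--text_encoder_lr={cfg['text_encoder_lr']}",
    f"--lr_scheduler={cfg['lr_scheduler']}",
    f"--network_dim={cfg['network_dim']}",
    f"--network_alpha={cfg['network_alpha']}",
    f"--mixed_precision={cfg['mixed_precision']}",
    f"--save_model_as={cfg['save_model_as']}",
    f"--clip_skip={cfg['clip_skip']}",
    f"--keep_tokens={cfg['keep_tokens']}",
    f"--seed={cfg['seed']}",
]

# Boolean switches become bare flags when enabled.
for key in ("cache_latents", "enable_bucket", "xformers", "shuffle_caption"):
    if cfg.get(key):
        args.append(f"--{key}")

# Print the assembled command for inspection before running it.
print(shlex.join(args))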