Phospy committed
Commit b9b80e8 · Parent: 0e4bb31

feat: upload nia3 lora model

nia3/config_file.toml ADDED
@@ -0,0 +1,57 @@
+ [model_arguments]
+ v2 = false
+ v_parameterization = false
+ pretrained_model_name_or_path = "/content/pretrained_model/Animefull-final-pruned.ckpt"
+
+ [additional_network_arguments]
+ no_metadata = false
+ unet_lr = 1e-4
+ text_encoder_lr = 5e-5
+ network_module = "networks.lora"
+ network_dim = 128
+ network_alpha = 128
+ network_train_unet_only = false
+ network_train_text_encoder_only = false
+
+ [optimizer_arguments]
+ optimizer_type = "AdamW8bit"
+ learning_rate = 1e4
+ max_grad_norm = 1.0
+ lr_scheduler = "cosine_with_restarts"
+ lr_warmup_steps = 0
+ lr_scheduler_num_cycles = 0
+
+ [dataset_arguments]
+ cache_latents = true
+ debug_dataset = false
+
+ [training_arguments]
+ output_dir = "/content/LoRA/output"
+ output_name = "nia3"
+ save_precision = "fp16"
+ save_every_n_epochs = 1
+ train_batch_size = 5
+ max_token_length = 225
+ mem_eff_attn = false
+ xformers = true
+ max_train_epochs = 5
+ max_data_loader_n_workers = 8
+ persistent_data_loader_workers = true
+ seed = 31337
+ gradient_checkpointing = false
+ gradient_accumulation_steps = 1
+ mixed_precision = "fp16"
+ clip_skip = 2
+ logging_dir = "/content/LoRA/logs"
+ log_prefix = "nia3"
+ lowram = true
+
+ [sample_prompt_arguments]
+ sample_every_n_epochs = 1
+ sample_sampler = "ddim"
+
+ [dreambooth_arguments]
+ prior_loss_weight = 1.0
+
+ [saving_arguments]
+ save_model_as = "safetensors"
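
The file above follows the TOML argument layout used by kohya-ss sd-scripts for LoRA training. A minimal sketch of reading it back and checking the key hyperparameters with Python's standard tomllib (Python 3.11+); the relative path to the checked-out repo is an assumption, not part of the commit:

import tomllib  # standard library since Python 3.11

# Path is an assumption; point it at a local checkout of this repo.
with open("nia3/config_file.toml", "rb") as f:
    cfg = tomllib.load(f)

net = cfg["additional_network_arguments"]
train = cfg["training_arguments"]

# LoRA rank/alpha and the per-module learning rates set above.
print("network_dim:", net["network_dim"])          # 128
print("network_alpha:", net["network_alpha"])      # 128
print("unet_lr:", net["unet_lr"])                  # 1e-4
print("text_encoder_lr:", net["text_encoder_lr"])  # 5e-5
print("epochs:", train["max_train_epochs"], "batch:", train["train_batch_size"])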
nia3/dataset_config.toml ADDED
@@ -0,0 +1,28 @@
+ [[datasets]]
+ resolution = 512
+ min_bucket_reso = 256
+ max_bucket_reso = 1024
+ caption_dropout_rate = 0
+ caption_tag_dropout_rate = 0
+ caption_dropout_every_n_epochs = 0
+ flip_aug = false
+ color_aug = false
+ [[datasets.subsets]]
+ image_dir = "/content/LoRA/train_data"
+ class_tokens = "mksks style"
+ num_repeats = 10
+
+ [[datasets.subsets]]
+ is_reg = true
+ image_dir = "/content/LoRA/reg_data"
+ class_tokens = "style"
+ num_repeats = 1
+
+ [general]
+ enable_bucket = true
+ caption_extension = ".txt"
+ shuffle_caption = true
+ keep_tokens = 1
+ bucket_reso_steps = 64
+ bucket_no_upscale = false
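
With these subsets, the nominal epoch length follows from num_repeats times the number of images in each directory, divided by train_batch_size = 5 from config_file.toml. A rough sketch of that arithmetic, assuming the /content/LoRA/train_data and /content/LoRA/reg_data directories from the config exist locally; the extension list is a placeholder, and the actual sd-scripts dataloader may balance regularization images differently:

import math
from pathlib import Path

TRAIN_BATCH_SIZE = 5  # train_batch_size from config_file.toml

def count_images(directory: str) -> int:
    # The extension list is an assumption about what the folders contain.
    exts = {".png", ".jpg", ".jpeg", ".webp"}
    return sum(1 for p in Path(directory).rglob("*") if p.suffix.lower() in exts)

train_images = count_images("/content/LoRA/train_data")  # num_repeats = 10
reg_images = count_images("/content/LoRA/reg_data")      # num_repeats = 1

# Each training image is repeated 10 times per epoch, each reg image once.
samples_per_epoch = train_images * 10 + reg_images * 1
print("approx. steps per epoch:", math.ceil(samples_per_epoch / TRAIN_BATCH_SIZE))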
nia3/nia3-000001.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ce0c4afc1df1cdfddc18ff1cb74417d1f3f53fbbc6ffe61f894e6f8915ebaa6
+ size 151114217
nia3/nia3-000002.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93521df4f393eb949738487583d8e86053d29c2bf07af01380d8aa991c09fdfd
+ size 151114217
nia3/nia3-000003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d482844d61579cc052088fd029f0997c476c13990f6221457ea44ba2f7773c57
+ size 151114217
nia3/nia3-000004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:21ee628fdc875633ffc4ca3690f8e5ebe327be4e6b00007f42d29a10a9db4a4c
+ size 151114216
nia3/nia3.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:540e0c9c0c65ce5a07036e5aa211fc5df734070d444997eeadae90005d0a7a17
+ size 151114217
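
The .safetensors weights are stored through Git LFS, so the diffs above record only the pointer (hash and size, about 151 MB each, consistent with a rank-128 SD1.x LoRA saved in fp16). A small sketch for inspecting the final checkpoint locally, assuming the safetensors and torch packages are installed and git lfs pull has been run so the real file is present; the key and metadata names follow kohya's LoRA conventions and are assumptions here:

from safetensors import safe_open

# Assumes `git lfs pull` has replaced the pointer with the actual file.
with safe_open("nia3/nia3.safetensors", framework="pt", device="cpu") as f:
    keys = list(f.keys())
    print("tensors:", len(keys))

    # kohya-style LoRA keys come in lora_down / lora_up pairs plus alpha scalars.
    down_keys = [k for k in keys if k.endswith("lora_down.weight")]
    if down_keys:
        shape = tuple(f.get_tensor(down_keys[0]).shape)
        print(down_keys[0], shape)  # leading dim should match network_dim = 128

    # Training metadata written by sd-scripts (expected since no_metadata = false).
    meta = f.metadata() or {}
    print("ss_network_dim:", meta.get("ss_network_dim"))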
nia3/sample/nia3_20230331172148_e000001_01.png ADDED
nia3/sample/nia3_20230331172437_e000002_01.png ADDED
nia3/sample/nia3_20230331173129_e000001_01.png ADDED
nia3/sample/nia3_20230331173441_e000002_01.png ADDED
nia3/sample/nia3_20230331173753_e000003_01.png ADDED
nia3/sample/nia3_20230331174104_e000004_01.png ADDED
nia3/sample/nia3_20230331174414_e000005_01.png ADDED