---
# Viewer metadata carried over from the original paste (file size 1,822 bytes,
# blob 297d8c5, line-number gutter); kept here as a comment so the document
# parses as valid YAML.
# Training configuration for a Stable Diffusion XL fine-tune (base model and
# VAE are named under the pretrained_* keys below).
# NOTE(review): the key set matches diffusers' DreamBooth LoRA SDXL training
# script — confirm against the actual consumer before relying on any per-key
# semantics noted here.

# -- Adam optimizer hyperparameters --
adam_beta1: 0.9
adam_beta2: 0.999
adam_epsilon: 1.0e-08
adam_weight_decay: 0.0001
# null — presumably a separate decay for the text encoder that falls back to
# a default when unset; TODO confirm in the consumer.
adam_weight_decay_text_encoder: null

allow_tf32: false

# -- Dataset & caching --
cache_dir: null
cache_latents: true
caption_column: prompt
center_crop: false

# -- Checkpointing --
# 100000 exceeds max_train_steps (1500 below), so no intermediate
# checkpoints will be written during this run.
checkpointing_steps: 100000
checkpoints_total_limit: null

# -- Class (regularization) data: inactive, with_prior_preservation is false --
class_data_dir: null
class_prompt: null

# -- Crop coordinates --
crops_coords_top_left_h: 0
crops_coords_top_left_w: 0

dataloader_num_workers: 0
dataset_config_name: null
dataset_name: ./f6b1fb0d-1006-4c69-8241-6fb2270d9c13
enable_xformers_memory_efficient_attention: false

# -- Gradients --
gradient_accumulation_steps: 1
gradient_checkpointing: true

# -- Hub upload: inactive, push_to_hub is false --
hub_model_id: null
hub_token: null

image_column: image
instance_data_dir: null
# "TOK" here matches token_abstraction below.
instance_prompt: in the style of TOK

# -- Learning-rate schedule --
learning_rate: 0.0005
local_rank: -1
logging_dir: logs
lr_num_cycles: 1
lr_power: 1.0
lr_scheduler: constant
lr_warmup_steps: 0
max_grad_norm: 1.0
# Step cap; presumably takes precedence over num_train_epochs — TODO confirm.
max_train_steps: 1500
mixed_precision: bf16
noise_offset: 0
num_class_images: 100  # only relevant when with_prior_preservation is true
num_new_tokens_per_abstraction: 2
num_train_epochs: 7
num_validation_images: 4
optimizer: adamW
output_dir: besni

# -- Models --
pretrained_model_name_or_path: stabilityai/stable-diffusion-xl-base-1.0
pretrained_vae_model_name_or_path: madebyollin/sdxl-vae-fp16-fix

# -- Prior preservation: inactive, with_prior_preservation is false --
prior_generation_precision: null
prior_loss_weight: 1.0

# -- Prodigy optimizer settings: presumably unused, optimizer is adamW --
prodigy_beta3: null
prodigy_decouple: true
prodigy_safeguard_warmup: true
prodigy_use_bias_correction: true

push_to_hub: false
rank: 32  # NOTE(review): presumably the LoRA rank — confirm
repeats: 12
report_to: tensorboard
resolution: 1024
resume_from_checkpoint: null
revision: null
sample_batch_size: 4
scale_lr: false
seed: 42
snr_gamma: 5.0

# -- Text-encoder training --
text_encoder_lr: 5.0e-05
token_abstraction: TOK
train_batch_size: 2
train_text_encoder: true
train_text_encoder_frac: 0.8
# Textual-inversion variant is off, so the _ti_frac value below is presumably
# ignored — TODO confirm.
train_text_encoder_ti: false
train_text_encoder_ti_frac: 0.5
use_8bit_adam: false

# -- Validation (validation_prompt is null, so prompt-based validation is
#    presumably skipped — confirm) --
validation_epochs: 50
validation_prompt: null
variant: null
with_prior_preservation: false
# (stray "|" artifact from the original paste removed — end of config)