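# Training config: rank-4 LoRA on AuraFlow v0.3 with the shortcut-model objective.
# The base checkpoint is a bitsandbytes NF4-quantized (`bnb_nf4`) safetensors file
# and computation runs in bfloat16. Assumed semantics of the shortcut keys:
# `flow_matching_ratio: 0.75` trains roughly 75% of samples on the plain
# flow-matching loss and the rest on the shortcut self-consistency loss, with
# step sizes discretized up to `shortcut_max_steps`.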
model:
  checkpoint_path: "./models/aura_flow_0.3.bnb_nf4.safetensors"
  pretrained_model_name_or_path: fal/AuraFlow-v0.3

  dtype: bfloat16

  denoiser:
    use_flash_attn: true

    use_shortcut: true

    flow_matching_ratio: 0.75
    shortcut_max_steps: 128
    shortcut_cfg_scale: 1.0

    timestep_sampling_type: "sigmoid"

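# Two LoRA adapters. The first (rank 4, no bias) covers the DiT attention, MLP,
# and modulation (mod*) layers; the second (rank 4, with bias) covers the timestep
# and shortcut-step embedders, presumably so the denoiser can learn the extra
# shortcut conditioning. `include_keys` / `exclude_keys` are assumed to be
# substring filters on module names.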
peft:
  -
    config:
      type: lora
      rank: 4
      alpha: 1.0
      dropout: 0.0
      use_bias: false

      dtype: bfloat16

    include_keys: [
        ".attn.",
        ".mlp.",
        ".mlpC.",
        ".mlpX.",

        ".modC.",
        ".modX.",
        ".modCX.",
      ]
    exclude_keys:
      [".text_encoder.", ".vae.", ".t_embedder.", ".final_linear.", ".modF."]

  -
    config:
      type: lora
      rank: 4
      alpha: 1.0
      dropout: 0.0
      use_bias: true

      dtype: bfloat16

    include_keys: [".t_embedder.", ".shortcut_embedder."]
    exclude_keys: [".text_encoder.", ".vae.", ".final_linear.", ".modF."]

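# Aspect-ratio bucketing over a 1k-image Pexels sample, repeated twice per epoch:
# buckets are built from a 1024px base in 128px steps, and images below 384px are
# kept at native size rather than upscaled (assumed reading of `min_size` /
# `do_upscale`).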
dataset:
  folder: "data/pexels-1k-random"
  num_repeats: 2
  batch_size: 2

  bucket_base_size: 1024
  step: 128
  min_size: 384
  do_upscale: false

  caption_processors: []

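# Schedule-free RAdam (facebookresearch/schedule_free); it needs no separate LR
# scheduler, so only the learning rate is passed.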
optimizer:
  name: "schedulefree.RAdamScheduleFree"
  args:
    lr: 0.03

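# Log metrics to Weights & Biases under this project name.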
tracker:
  project_name: "auraflow-shortcut-1"
  loggers:
    - wandb

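# Save once per epoch (and at the end of training); the hf_hub callback also
# uploads each save to the Hugging Face Hub repo below, under the shortcut-10/
# folder.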
saving:
  strategy:
    per_epochs: 1
    per_steps: null
    save_last: true

  callbacks:
    - type: "hf_hub"

      name: "shortcut-10"
      save_dir: "./output/shortcut-10"

      hub_id: "p1atdev/afv03-lora"
      dir_in_repo: "shortcut-10"

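# Generate previews every 100 steps (and each epoch); the discord callback posts
# them to a webhook (URL masked here). preview.yml presumably holds the preview
# prompts.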
preview:
  strategy:
    per_epochs: 1
    per_steps: 100

  callbacks:
    - type: "discord"
      url: "masked"

  data:
    path: "./projects/shortcut/preview.yml"

seed: 42
num_train_epochs: 20

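# Memory / throughput settings: gradient checkpointing with 16-step accumulation
# (effective batch size 2 x 16 = 32), gradient-norm clipping at 1.0, and
# torch.compile in max-autotune mode. fp32_matmul_precision is presumably
# forwarded to torch.set_float32_matmul_precision, where "medium" permits
# bf16-accelerated matmuls.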
trainer:
  gradient_checkpointing: true
  gradient_accumulation_steps: 16

  clip_grad_norm: 1.0

  torch_compile: true
  torch_compile_args:
    mode: max-autotune
    fullgraph: true

  fp32_matmul_precision: "medium"