{
"_class_name": "MochiTransformer3DModel",
"_diffusers_version": "0.32.0.dev0",
"_name_or_path": "mochi-1-diffusers",
"activation_fn": "swiglu",
"attention_head_dim": 128,
"in_channels": 12,
"max_sequence_length": 256,
"num_attention_heads": 24,
"num_layers": 48,
"out_channels": null,
"patch_size": 2,
"pooled_projection_dim": 1536,
"qk_norm": "rms_norm",
"quantization_config": {
"_load_in_4bit": true,
"_load_in_8bit": false,
"bnb_4bit_compute_dtype": "bfloat16",
"bnb_4bit_quant_storage": "uint8",
"bnb_4bit_quant_type": "nf4",
"bnb_4bit_use_double_quant": false,
"llm_int8_enable_fp32_cpu_offload": false,
"llm_int8_has_fp16_weight": false,
"llm_int8_skip_modules": [
"final_layer",
"x_embedder.proj",
"t_embedder",
"pos_frequencies",
"t5",
"blocks.0",
"blocks.1",
"blocks.2",
"blocks.3",
"blocks.4",
"blocks.5",
"blocks.6",
"blocks.7",
"blocks.8",
"blocks.9",
"blocks.10",
"blocks.11",
"blocks.12",
"blocks.13",
"blocks.14",
"blocks.15",
"blocks.16",
"blocks.17",
"blocks.18",
"blocks.30",
"blocks.31",
"blocks.32",
"blocks.33",
"blocks.34",
"blocks.35",
"blocks.36",
"blocks.37",
"blocks.38",
"blocks.39",
"blocks.40",
"blocks.41",
"blocks.42",
"blocks.43",
"blocks.44",
"blocks.45",
"blocks.46",
"blocks.47"
],
"llm_int8_threshold": 6.0,
"load_in_4bit": true,
"load_in_8bit": false,
"quant_method": "bitsandbytes"
},
"text_embed_dim": 4096,
"time_embed_dim": 256
}