SmolLM2-360M-Instruct-q0f16-MLC / ndarray-cache.json
{
"metadata": {
"ParamSize": 194,
"ParamBytes": 723642240.0,
"BitsPerParam": 16.0
},
"records": [
{
"dataPath": "params_shard_0.bin",
"format": "raw-shard",
"nbytes": 94371840,
"records": [
{
"name": "model.embed_tokens.weight",
"shape": [
49152,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 94371840,
"byteOffset": 0
}
],
"md5sum": "14802d97e524406bd41c5cb163fe7c57"
},
{
"dataPath": "params_shard_1.bin",
"format": "raw-shard",
"nbytes": 24581760,
"records": [
{
"name": "model.layers.0.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 0
},
{
"name": "model.layers.0.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1920
},
{
"name": "model.layers.0.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 4917120
},
{
"name": "model.layers.0.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.0.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 14749440
},
{
"name": "model.layers.0.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 17821440
},
{
"name": "model.layers.1.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 19664640
},
{
"name": "model.layers.1.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 19666560
}
],
"md5sum": "aeaa4a974508608290221c5cac299921"
},
{
"dataPath": "params_shard_2.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.1.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.1.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.1.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.1.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.10.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.10.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.10.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.10.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.10.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "b758180d2dc4c3d9b612935e2d1e2017"
},
{
"dataPath": "params_shard_3.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.10.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.11.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.11.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.11.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.11.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.11.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.11.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.12.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.12.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "5eb65ca26cd9dfad815a531215acf635"
},
{
"dataPath": "params_shard_4.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.12.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.12.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.12.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.12.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.13.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.13.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.13.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.13.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.13.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "ff5d52eccb22829ec3c067878da17336"
},
{
"dataPath": "params_shard_5.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.13.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.14.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.14.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.14.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.14.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.14.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.14.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.15.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.15.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "806ca6f2806bee2fac5b5cb0bad4e177"
},
{
"dataPath": "params_shard_6.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.15.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.15.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.15.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.15.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.16.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.16.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.16.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.16.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.16.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "9334c07a231c32d90ec4c15c61514481"
},
{
"dataPath": "params_shard_7.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.16.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.17.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.17.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.17.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.17.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.17.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.17.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.18.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.18.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "e65235d6b3e351376dc6a2ce93110e1a"
},
{
"dataPath": "params_shard_8.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.18.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.18.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.18.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.18.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.19.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.19.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.19.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.19.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.19.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "fd0408f5a12d665d92aa9031aaa08691"
},
{
"dataPath": "params_shard_9.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.19.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.2.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.2.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.2.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.2.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.2.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.2.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.20.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.20.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "259910fefd956f82d03c0c7d569bf621"
},
{
"dataPath": "params_shard_10.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.20.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.20.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.20.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.20.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.21.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.21.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.21.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.21.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.21.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "565d17f33dd7037240db993735c73fd1"
},
{
"dataPath": "params_shard_11.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.21.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.22.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.22.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.22.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.22.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.22.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.22.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.23.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.23.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "07c73400b9124a943507998857673a14"
},
{
"dataPath": "params_shard_12.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.23.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.23.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.23.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.23.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.24.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.24.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.24.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.24.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.24.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "f24a0408c7180d0dbb2e4bc1d194c5b1"
},
{
"dataPath": "params_shard_13.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.24.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.25.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.25.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.25.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.25.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.25.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.25.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.26.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.26.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "49371cb53aa153bbdb641b9132ff5272"
},
{
"dataPath": "params_shard_14.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.26.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.26.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.26.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.26.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.27.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.27.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.27.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.27.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.27.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "d873541cda9345ffc2bdd6dd577af404"
},
{
"dataPath": "params_shard_15.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.27.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.28.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.28.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.28.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.28.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.28.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.28.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.29.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.29.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "3cffe8a7470cec88e92604cb031e24fc"
},
{
"dataPath": "params_shard_16.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.29.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.29.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.29.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.29.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.3.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.3.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.3.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.3.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.3.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "a316e9a58ec0bb02d0e47a13a5f3bce9"
},
{
"dataPath": "params_shard_17.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.3.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.30.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.30.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.30.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.30.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.30.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.30.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.31.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.31.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "6f29a972349185ba6a6140e3f80012a7"
},
{
"dataPath": "params_shard_18.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.31.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.31.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.31.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.31.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.4.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.4.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.4.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.4.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.4.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "2abb43218c7d9f1b88c8de2f2b47516a"
},
{
"dataPath": "params_shard_19.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.4.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.5.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.5.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.5.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.5.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.5.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.5.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.6.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.6.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "baab2b75f9ee14453bbf77055a99a974"
},
{
"dataPath": "params_shard_20.bin",
"format": "raw-shard",
"nbytes": 32568960,
"records": [
{
"name": "model.layers.6.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.6.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.6.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.6.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.layers.7.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
},
{
"name": "model.layers.7.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 14749440
},
{
"name": "model.layers.7.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 19664640
},
{
"name": "model.layers.7.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 29495040
},
{
"name": "model.layers.7.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 29496960
}
],
"md5sum": "af364ef664089b76d38a7113ccb2caf9"
},
{
"dataPath": "params_shard_21.bin",
"format": "raw-shard",
"nbytes": 26424960,
"records": [
{
"name": "model.layers.7.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 0
},
{
"name": "model.layers.8.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 1843200
},
{
"name": "model.layers.8.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 1845120
},
{
"name": "model.layers.8.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 6760320
},
{
"name": "model.layers.8.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 16590720
},
{
"name": "model.layers.8.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 16592640
},
{
"name": "model.layers.8.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 19664640
},
{
"name": "model.layers.9.input_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 21507840
},
{
"name": "model.layers.9.mlp.down_proj.weight",
"shape": [
960,
2560
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 4915200,
"byteOffset": 21509760
}
],
"md5sum": "c5640d5b594521486939e15198e82339"
},
{
"dataPath": "params_shard_22.bin",
"format": "raw-shard",
"nbytes": 14749440,
"records": [
{
"name": "model.layers.9.mlp.gate_up_proj.weight",
"shape": [
5120,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 9830400,
"byteOffset": 0
},
{
"name": "model.layers.9.post_attention_layernorm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 9830400
},
{
"name": "model.layers.9.self_attn.qkv_proj.weight",
"shape": [
1600,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 3072000,
"byteOffset": 9832320
},
{
"name": "model.layers.9.self_attn.o_proj.weight",
"shape": [
960,
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1843200,
"byteOffset": 12904320
},
{
"name": "model.norm.weight",
"shape": [
960
],
"dtype": "float16",
"format": "f32-to-bf16",
"nbytes": 1920,
"byteOffset": 14747520
}
],
"md5sum": "0a90bb9e990eeb6d6ce67b02caeb91f5"
}
]
}
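
The manifest above is read by the MLC loader to locate each parameter tensor inside the params_shard_*.bin files: every shard entry gives a dataPath, a total nbytes, an md5sum, and a list of records with per-tensor byteOffset and nbytes. As a quick consistency check, here is a minimal Python sketch, assuming a local copy of the model folder (the directory name below is a placeholder); it only uses the keys shown in this manifest (ParamBytes, dataPath, nbytes, records, byteOffset, md5sum) and verifies that the records pack each shard exactly and that the byte totals and checksums agree.

    # Minimal sketch: sanity-check an MLC ndarray-cache.json manifest against
    # its params_shard_*.bin files. The directory path is an assumed local
    # checkout of the model repo, not something specified by the manifest.
    import hashlib
    import json
    from pathlib import Path

    model_dir = Path("SmolLM2-360M-Instruct-q0f16-MLC")  # placeholder path
    manifest = json.loads((model_dir / "ndarray-cache.json").read_text())

    total_param_bytes = 0
    for shard in manifest["records"]:
        # In this manifest, records within a shard are packed back to back,
        # starting at byteOffset 0.
        offset = 0
        for rec in shard["records"]:
            assert rec["byteOffset"] == offset, rec["name"]
            offset += rec["nbytes"]
            total_param_bytes += rec["nbytes"]
        # The packed records must exactly fill the shard.
        assert offset == shard["nbytes"], shard["dataPath"]

        # If the shard file is present locally, check its size and md5sum.
        shard_path = model_dir / shard["dataPath"]
        if shard_path.exists():
            data = shard_path.read_bytes()
            assert len(data) == shard["nbytes"], shard["dataPath"]
            assert hashlib.md5(data).hexdigest() == shard["md5sum"], shard["dataPath"]

    # The per-record total should match the metadata header
    # (723,642,240 bytes for this model).
    assert total_param_bytes == manifest["metadata"]["ParamBytes"]
    print("manifest consistent:", len(manifest["records"]), "shards,",
          total_param_bytes, "bytes")

Running this from the directory that contains ndarray-cache.json and the shard files should print one summary line; any mismatch between the manifest and the binaries trips the corresponding assertion.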