out/pruning_config.json
{
  "begin_pruning_step": 1400,
  "end_pruning_step": 12000,
  "explicit_prune": {},
  "init_strategy": "uniform",
  "initial_step": 0,
  "not_to_prune": [
    "qa_outputs",
    "pooler",
    "teacher"
  ],
  "policy_begin_step": 1400,
  "policy_end_step": 10000,
  "prune_layer_types": {
    "Linear": {
      "name": "weight"
    }
  },
  "pruning_fn": "unstructured_magnitude",
  "pruning_fn_default_kwargs": {
    "target_sparsity": 0.85
  },
  "pruning_frequency": 100,
  "scheduler": "iterative",
  "weight_sparsity_map": {}
}
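Below is a minimal, hypothetical Python sketch of how a config like this could drive iterative unstructured magnitude pruning with PyTorch. The helper names (load_pruning_config, current_sparsity, prune_step), the toy model, and the linear ramp from zero to target_sparsity between begin_pruning_step and end_pruning_step are assumptions for illustration; the training code that actually consumes this file is not shown here, and the real sparsity schedule may differ.

import json

import torch.nn as nn
import torch.nn.utils.prune as prune


def load_pruning_config(path: str = "out/pruning_config.json") -> dict:
    """Read the pruning configuration from disk."""
    with open(path) as f:
        return json.load(f)


def current_sparsity(cfg: dict, step: int) -> float:
    """Ramp sparsity linearly from 0 to target_sparsity between
    begin_pruning_step and end_pruning_step (assumed schedule)."""
    target = cfg["pruning_fn_default_kwargs"]["target_sparsity"]
    begin, end = cfg["begin_pruning_step"], cfg["end_pruning_step"]
    if step < begin:
        return 0.0
    if step >= end:
        return target
    return target * (step - begin) / (end - begin)


def prune_step(model: nn.Module, cfg: dict, step: int) -> None:
    """Prune Linear weights by magnitude every pruning_frequency steps,
    skipping any module whose name is listed in not_to_prune."""
    if step < cfg["begin_pruning_step"] or step > cfg["end_pruning_step"]:
        return
    if step % cfg["pruning_frequency"] != 0:
        return
    amount = current_sparsity(cfg, step)
    for name, module in model.named_modules():
        if not isinstance(module, nn.Linear):
            continue
        if any(part in cfg["not_to_prune"] for part in name.split(".")):
            continue
        # l1_unstructured drops the lowest-magnitude weights, matching the
        # "unstructured_magnitude" pruning_fn. Note that PyTorch interprets
        # `amount` as a fraction of the currently unpruned weights, so a
        # production scheduler would compute incremental amounts instead.
        prune.l1_unstructured(module, name="weight", amount=amount)


if __name__ == "__main__":
    cfg = load_pruning_config()
    model = nn.Sequential(nn.Linear(8, 8), nn.Linear(8, 2))  # toy model
    prune_step(model, cfg, step=1500)

In this sketch, a step of 1500 falls just past begin_pruning_step (1400), so only a small fraction of weights is pruned; calls at later steps up to end_pruning_step (12000) would progressively approach the 0.85 target sparsity.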