Model configuration file (394 Bytes, commit 56ef57e):
{
  "architectures": [
    "CLIPVisionEncoderOnly"
  ],
  "auto_map": {
    "AutoConfig": "utils.CLIPVisionEncoderOnlyConfig",
    "AutoModel": "utils.CLIPVisionEncoderOnly"
  },
  "frozen": false,
  "lora": null,
  "model_name": "openai/clip-vit-base-patch32",
  "model_type": "clip_custom_vision_model",
  "pretrained": false,
  "torch_dtype": "float32",
  "transformers_version": "4.40.1"
}
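Because the "auto_map" entries point AutoConfig and AutoModel at custom classes defined in a utils.py module shipped alongside this file, loading the checkpoint requires trust_remote_code=True. Below is a minimal sketch of how such a config is typically consumed; the repository path is a placeholder assumption, not a name taken from this file.

```python
# Minimal sketch: loading a checkpoint whose config.json maps AutoConfig/AutoModel
# onto custom classes (utils.CLIPVisionEncoderOnlyConfig / utils.CLIPVisionEncoderOnly)
# stored in the same repository. The path below is hypothetical.
from transformers import AutoConfig, AutoModel

repo = "path/to/clip-vision-encoder-only"  # placeholder: local dir or hub id containing config.json + utils.py

# trust_remote_code=True lets transformers import the custom classes named in "auto_map".
config = AutoConfig.from_pretrained(repo, trust_remote_code=True)
model = AutoModel.from_pretrained(repo, trust_remote_code=True)

print(type(model).__name__)  # expected: CLIPVisionEncoderOnly
```

Note that with "pretrained": false the custom class is expected to build the openai/clip-vit-base-patch32 architecture from its configuration rather than download OpenAI's pretrained weights, and "frozen": false / "lora": null indicate the encoder is trained fully, without parameter freezing or LoRA adapters.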