import transformers
class MCLIPConfig(transformers.PretrainedConfig):
    """Configuration class for Multilingual-CLIP text encoders.

    Records the name of the underlying transformer checkpoint and the
    dimensionalities of the text-side and image-side embedding spaces.
    Extra keyword arguments are forwarded to ``PretrainedConfig``.
    """

    # Identifier used by the transformers Auto* machinery.
    model_type = "M-CLIP"

    def __init__(self, modelBase='xlm-roberta-large',
                 transformerDimSize=1024, imageDimSize=768, **kwargs):
        # NOTE: camelCase attribute names are kept as-is — they mirror the
        # keys serialized in published M-CLIP config.json files.
        self.modelBase = modelBase
        self.numDims = imageDimSize
        self.transformerDimensions = transformerDimSize
        super().__init__(**kwargs)