Files changed (1)
  1. config.json +5 -37
config.json CHANGED
@@ -3,49 +3,17 @@
   "architectures": [
     "XLMRobertaModel"
   ],
+  "model_type": "xlm-roberta",
+  "max_position_embeddings": 8194,
+  "pad_token_id": 1,
   "attention_probs_dropout_prob": 0.1,
-  "auto_map": {
-    "AutoConfig": "jinaai/xlm-roberta-flash-implementation--configuration_xlm_roberta.XLMRobertaFlashConfig",
-    "AutoModel": "jinaai/xlm-roberta-flash-implementation--modeling_lora.XLMRobertaLoRA",
-    "AutoModelForMaskedLM": "jinaai/xlm-roberta-flash-implementation--modeling_xlm_roberta.XLMRobertaForMaskedLM",
-    "AutoModelForPreTraining": "jinaai/xlm-roberta-flash-implementation--modeling_xlm_roberta.XLMRobertaForPreTraining"
-  },
-  "bos_token_id": 0,
-  "classifier_dropout": null,
-  "emb_pooler": null,
-  "eos_token_id": 2,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 1024,
   "initializer_range": 0.02,
   "intermediate_size": 4096,
   "layer_norm_eps": 1e-05,
-  "load_trained_adapters": true,
-  "lora_adaptations": ["retrieval.query", "retrieval.passage", "separation", "classification", "text-matching"],
-  "lora_alpha": 1,
-  "lora_dropout_p": 0.0,
-  "lora_main_params_trainable": false,
-  "lora_rank": 4,
-  "matryoshka_dimensions": [32, 64, 128, 256, 512, 768, 1024],
-  "max_position_embeddings": 8194,
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
-  "output_past": true,
-  "pad_token_id": 1,
-  "position_embedding_type": "rotary",
-  "rotary_emb_base": 20000.0,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.30.2",
-  "truncate_dim": null,
-  "type_vocab_size": 1,
-  "use_cache": true,
-  "use_flash_attn": true,
-  "vocab_size": 250002,
-  "task_instructions": {
-    "retrieval.query": "Represent the query for retrieving evidence documents: ",
-    "retrieval.passage": "Represent the document for retrieval: ",
-    "separation": "",
-    "classification": "",
-    "text-matching": ""
-  }
-}
+  "vocab_size": 250002
+}
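
After this change the file carries only stock XLM-RoBERTa fields plus an explicit `"model_type"`, so it should resolve through `AutoConfig` without the removed `auto_map` entries or `trust_remote_code`. A minimal sketch, assuming the updated `config.json` sits in the current working directory (the path is illustrative, not part of the diff):

```python
# Minimal sketch: loading the slimmed-down config with stock transformers.
from transformers import AutoConfig, AutoModel

# "." assumes config.json is in the working directory (illustrative path).
# With "model_type": "xlm-roberta" present, AutoConfig resolves directly to
# XLMRobertaConfig; no auto_map or trust_remote_code is needed anymore.
config = AutoConfig.from_pretrained(".")

model = AutoModel.from_config(config)  # plain XLMRobertaModel
print(type(model).__name__)            # XLMRobertaModel
print(config.max_position_embeddings)  # 8194
```

Note that `from_config` builds a randomly initialized model; pretrained weights would still be loaded separately via `from_pretrained` against the full checkpoint.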