---
inference: false
language:
- en
- de
- fr
- it
- es
- id
- ar
- ko
- 'no'
- ru
library_name: transformers
license: apache-2.0
model_creator: LHC
model_name: XPurpose-ClownCar-v0
model_type: Mixtral MoE
pipeline_tag: text-generation
prompt_template: '<|im_start|>system

  {system_message}<|im_end|>

  <|im_start|>user

  {prompt}<|im_end|>

  <|im_start|>assistant

  '
quantized_by: LHC
tags:
- mistral
- finetune
- dpo
- multi-language
- multi-purpose
- MoE
- Mixture-of-Experts
- mixtral
---
<!-- markdownlint-disable MD041 -->

<!-- header start -->
<!-- 200823 -->
<div style="display: flex; justify-content: space-between; width: 100%;">
    <div style="display: flex; flex-direction: column; align-items: flex-start;">
        <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://www.linkedin.com/in/lucas-h%C3%A4nke-de-cansino-8b8521234/">Chat & support: LHC's LinkedIn</a></p>
    </div>
    <div style="display: flex; flex-direction: column; align-items: flex-end;">
        <p style="margin-top: 0.5em; margin-bottom: 0em;"><a href="https://github.com/sponsors/l4b4r4b4b4">Want to contribute? LHC's Github Sponsors</a></p>
    </div>
</div>

<hr style="margin-top: 1.0em; margin-bottom: 1.0em;">
<!-- header end -->

<!-- description start -->
XPurpose-ClownCar-v0 is a multi-purpose Mixture-of-Experts (MoE) model built from the following expert configuration:

```yaml
base_model: openaccess-ai-collective/DPOpenHermes-7B
dtype: bfloat16
experts:
- positive_prompts:
  - instruction
  - solutions
  - chat
  - questions
  - comprehension
  source_model: teknium/OpenHermes-2.5-Mistral-7B
- negative_prompts:
  - chat
  - questions
  - python
  positive_prompts:
  - coding
  - programming
  - code
  - programming language
  source_model: codellama/CodeLlama-13b-hf
- negative_prompts:
  - chat
  - questions
  positive_prompts:
  - python
  - pip
  - coding
  - programming
  - code
  - programming language
  source_model: codellama/CodeLlama-13b-Python-hf
- negative_prompts:
  - chat
  - questions
  positive_prompts:
  - mathematics
  - optimization
  - step-by-step
  - science
  source_model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo
- negative_prompts:
  - chat
  - questions
  positive_prompts:
  - bedtime story
  - Once upon a time
  - storytelling
  - narrator
  source_model: tom92119/llama-2-7b-bedtime-story
- negative_prompts:
  - chat
  - questions
  positive_prompts:
  - story
  - Once upon a time
  - storytelling
  - narrator
  source_model: Norquinal/Mistral-7B-storywriter
- negative_prompts:
  - chat
  - questions
  - instruction
  - solutions
  - chat
  - comprehension
  - mathematics
  - optimization
  - code
  - step-by-step
  - science
  positive_prompts:
  - function calls
  - functions
  - constrained grammar
  - API calls
  - LLM Tools
  source_model: meetkai/functionary-small-v2.2
- positive_prompts:
  - indonesian
  - indonesia
  source_model: azale-ai/Starstreak-7b-beta
- positive_prompts:
  - arabic
  - arab
  source_model: gagan3012/Mistral_arabic_dpo
- positive_prompts:
  - korean
  - korea
  source_model: davidkim205/komt-mistral-7b-v1
- positive_prompts:
  - chinese
  - china
  source_model: OpenBuddy/openbuddy-zephyr-7b-v14.1
- positive_prompts:
  - hindi
  - india
  source_model: manishiitg/open-aditi-hi-v1
- positive_prompts:
  - german
  - deutsch
  - Germany
  source_model: VAGOsolutions/SauerkrautLM-7b-v1-mistral
- positive_prompts:
  - Norway
  - Norwegian
  - Norsk
  source_model: bineric/NorskGPT-Mistral-7b
- positive_prompts:
  - Russian
  - Russia
  - "\u0420\u0443\u0441\u0441\u043A\u0438\u0439"
  - "\u0420\u043E\u0441\u0441\u0438\u044F"
  source_model: Droidfanat/llama-2-7b-custom-russian
gate_mode: hidden
```
<!-- description end -->
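
Once a merge built from this configuration is published on the Hugging Face Hub, its Mixtral-style expert layout can be checked with `transformers`. The snippet below is a minimal sketch only; the repo id `LHC88/XPurpose-ClownCar-v0` is an assumed placeholder, not a confirmed Hub path.

```python
# Minimal sketch: inspect the merged model's MoE layout with transformers.
# The repo id is an assumed placeholder; substitute the actual Hub path.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("LHC88/XPurpose-ClownCar-v0")

print(config.model_type)           # "mixtral" for a mergekit MoE merge
print(config.num_local_experts)    # number of experts merged into each MoE layer
print(config.num_experts_per_tok)  # experts routed per token at inference time
```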
178
+
179
+ <!-- prompt-template start -->
180
+ ## Prompt template: ChatML
181
+
182
+ ```
183
+ <|im_start|>system
184
+ {system_message}<|im_end|>
185
+ <|im_start|>user
186
+ {prompt}<|im_end|>
187
+ <|im_start|>assistant
188
+
189
+ ```
190
+
191
+ <!-- prompt-template end -->
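
Below is a minimal generation sketch that formats a prompt with the ChatML template above. It assumes the model is available under the placeholder repo id `LHC88/XPurpose-ClownCar-v0` and that your hardware can hold a model of this size; adjust the dtype and `device_map` to your setup.

```python
# Minimal usage sketch. The repo id is an assumed placeholder, not a confirmed path.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "LHC88/XPurpose-ClownCar-v0"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

# Build the prompt exactly as the ChatML template above specifies.
prompt = (
    "<|im_start|>system\n"
    "You are a helpful assistant.<|im_end|>\n"
    "<|im_start|>user\n"
    "Tell me a short bedtime story about a clown car.<|im_end|>\n"
    "<|im_start|>assistant\n"
)

inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=256, do_sample=True, temperature=0.7)
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
```

The sampling settings are illustrative; deterministic decoding works as well.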