Upload llmdolphin.py
llmdolphin.py  CHANGED  (+14 -0)
@@ -24,6 +24,19 @@ llm_models = {
     "Nemo-12B-Marlin-v4.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v4-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Nemo-12B-Marlin-v5-Q4_K_M.gguf": ["starble-dev/Nemo-12B-Marlin-v5-GGUF", MessagesFormatterType.CHATML],
     "NemoDori-v0.2-Upscaled.1-14B.Q4_K_M.gguf": ["mradermacher/NemoDori-v0.2-Upscaled.1-14B-GGUF", MessagesFormatterType.MISTRAL],
+    "NemoReRemix-12B-Q4_K_M.gguf": ["bartowski/NemoReRemix-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "Rocinante-12B-v1b-Q4_K_M.gguf": ["BeaverAI/Rocinante-12B-v1b-GGUF", MessagesFormatterType.CHATML],
+    "HolyNemo-12B.Q4_K_M.gguf": ["mradermacher/HolyNemo-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "mistral-nemo-gutenberg-12B-v2.Q4_K_M.gguf": ["mradermacher/mistral-nemo-gutenberg-12B-v2-GGUF", MessagesFormatterType.MISTRAL],
+    "KukulStanta-InfinityRP-7B-slerp.Q5_K_M.gguf": ["mradermacher/KukulStanta-InfinityRP-7B-slerp-GGUF", MessagesFormatterType.MISTRAL],
+    "Rocinante-12B-v1a-Q4_K_M.gguf": ["BeaverAI/Rocinante-12B-v1a-GGUF", MessagesFormatterType.MISTRAL],
+    "gemma-2-9b-it-WPO-HB.Q4_K_M.gguf": ["mradermacher/gemma-2-9b-it-WPO-HB-GGUF", MessagesFormatterType.ALPACA],
+    "mistral-nemo-bophades-12B.Q4_K_M.gguf": ["mradermacher/mistral-nemo-bophades-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "Stella-mistral-nemo-12B.Q4_K_S.gguf": ["mradermacher/Stella-mistral-nemo-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "Gemma-2-Ataraxy-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-9B-GGUF", MessagesFormatterType.ALPACA],
+    "NemoRemix-Magnum_V2_Base-12B.Q4_K_S.gguf": ["mradermacher/NemoRemix-Magnum_V2_Base-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "Synatra-7B-v0.3-dpo.Q5_K_M.gguf": ["mradermacher/Synatra-7B-v0.3-dpo-GGUF", MessagesFormatterType.MISTRAL],
+    "OpenCrystal-12B-Instruct.Q4_K_M.gguf": ["mradermacher/OpenCrystal-12B-Instruct-GGUF", MessagesFormatterType.MISTRAL],
     "dolphinmaid_l3-1_01sl-q5ks.gguf": ["Dunjeon/DolphinMaid_L3.1_8B-01_GGUF", MessagesFormatterType.LLAMA_3],
     "TypeI-12B.Q4_K_S.gguf": ["mradermacher/TypeI-12B-GGUF", MessagesFormatterType.CHATML],
     "lyralin-12b-v1-q5_k_m.gguf": ["NGalrion/Lyralin-12B-v1-Q5_K_M-GGUF", MessagesFormatterType.CHATML],
@@ -66,6 +79,7 @@ llm_models = {
     "L3-SAO-MIX-8B-V1.i1-Q5_K_M.gguf": ["mradermacher/L3-SAO-MIX-8B-V1-i1-GGUF", MessagesFormatterType.LLAMA_3],
     "bestofllama3-8b-stock-q5_k_m.gguf": ["bunnycore/BestofLLama3-8B-stock-Q5_K_M-GGUF", MessagesFormatterType.LLAMA_3],
     "L3-Umbral-Mind-RP-v3.0-8B-Q5_K_M.gguf": ["bartowski/L3-Umbral-Mind-RP-v3.0-8B-GGUF", MessagesFormatterType.LLAMA_3],
+    "Tess-3-Mistral-Nemo-Q4_K_M.gguf": ["bartowski/Tess-3-Mistral-Nemo-GGUF", MessagesFormatterType.MISTRAL],
     "Llama-3-8B-Stroganoff-2.0.Q5_K_M.gguf": ["RichardErkhov/HiroseKoichi_-_Llama-3-8B-Stroganoff-2.0-gguf", MessagesFormatterType.LLAMA_3],
     "L3-8B-Helium3.Q5_K_M.gguf": ["mradermacher/L3-8B-Helium3-GGUF", MessagesFormatterType.LLAMA_3],
     "MN-12B-Lyra-v1.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Lyra-v1-i1-GGUF", MessagesFormatterType.CHATML],
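
For reference, every entry added above follows the existing llm_models convention: the key is a GGUF filename and the value pairs the Hugging Face repository it comes from with the llama-cpp-agent chat-format enum. Below is a minimal sketch of how one of the new entries could be resolved to a local model file; the fetch_model helper and the use of huggingface_hub here are illustrative assumptions, not the loader actually used in llmdolphin.py (that code is outside this diff).

from huggingface_hub import hf_hub_download
from llama_cpp_agent import MessagesFormatterType

# Small subset of the table above: keys are GGUF filenames,
# values are [repo_id, chat-format enum].
llm_models = {
    "NemoReRemix-12B-Q4_K_M.gguf": ["bartowski/NemoReRemix-12B-GGUF", MessagesFormatterType.MISTRAL],
    "Tess-3-Mistral-Nemo-Q4_K_M.gguf": ["bartowski/Tess-3-Mistral-Nemo-GGUF", MessagesFormatterType.MISTRAL],
}

def fetch_model(name: str) -> tuple[str, MessagesFormatterType]:
    # Hypothetical helper: download the GGUF file from its source repo and
    # return the local path together with the formatter enum for that model.
    repo_id, formatter = llm_models[name]
    path = hf_hub_download(repo_id=repo_id, filename=name)
    return path, formatter

The formatter value (MISTRAL, CHATML, ALPACA, LLAMA_3) tells llama-cpp-agent which prompt template to apply when chatting with the downloaded model, which is why each new entry carries one alongside its repository.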