Update config.json
config.json  (+68 −11)
@@ -7,16 +7,55 @@
   },
   "llmModels": [
     {
-      "model": "
-      "name": "
+      "model": "glm-3-turbo",
+      "name": "glm-3-turbo",
       "maxContext": 16000,
       "maxResponse": 4000,
       "quoteMaxToken": 13000,
       "maxTemperature": 1.2,
-      "
+      "inputPrice": 0,
+      "outputPrice": 0,
       "censor": false,
       "vision": false,
-      "datasetProcess":
+      "datasetProcess": true,
+      "toolChoice": true,
+      "functionCall": false,
+      "customCQPrompt": "",
+      "customExtractPrompt": "",
+      "defaultSystemChatPrompt": "",
+      "defaultConfig": {}
+    },
+    {
+      "model": "glm-4",
+      "name": "glm-4",
+      "maxContext": 16000,
+      "maxResponse": 4000,
+      "quoteMaxToken": 13000,
+      "maxTemperature": 1.2,
+      "inputPrice": 0,
+      "outputPrice": 0,
+      "censor": false,
+      "vision": false,
+      "datasetProcess": true,
+      "toolChoice": true,
+      "functionCall": false,
+      "customCQPrompt": "",
+      "customExtractPrompt": "",
+      "defaultSystemChatPrompt": "",
+      "defaultConfig": {}
+    },
+    {
+      "model": "chatglm-pro",
+      "name": "chatglm-pro",
+      "maxContext": 16000,
+      "maxResponse": 4000,
+      "quoteMaxToken": 13000,
+      "maxTemperature": 1.2,
+      "inputPrice": 0,
+      "outputPrice": 0,
+      "censor": false,
+      "vision": false,
+      "datasetProcess": true,
       "toolChoice": true,
       "functionCall": false,
       "customCQPrompt": "",
@@ -44,8 +83,26 @@
       "defaultConfig": {}
     },
     {
-      "model": "gpt-3.5-turbo
-      "name": "gpt-3.5-turbo
+      "model": "gpt-3.5-turbo",
+      "name": "gpt-3.5-turbo",
+      "maxContext": 16000,
+      "maxResponse": 4000,
+      "quoteMaxToken": 13000,
+      "maxTemperature": 1.2,
+      "charsPointsPrice": 0,
+      "censor": false,
+      "vision": false,
+      "datasetProcess": false,
+      "toolChoice": true,
+      "functionCall": false,
+      "customCQPrompt": "",
+      "customExtractPrompt": "",
+      "defaultSystemChatPrompt": "",
+      "defaultConfig": {}
+    },
+    {
+      "model": "gpt-4o-mini",
+      "name": "gpt-4o-mini",
       "maxContext": 16000,
       "maxResponse": 16000,
       "quoteMaxToken": 13000,
@@ -55,7 +112,7 @@
       "vision": false,
       "datasetProcess": true,
       "toolChoice": true,
-      "functionCall":
+      "functionCall": true,
       "customCQPrompt": "",
       "customExtractPrompt": "",
       "defaultSystemChatPrompt": "",
@@ -91,7 +148,7 @@
       "vision": true,
       "datasetProcess": false,
       "toolChoice": true,
-      "functionCall":
+      "functionCall": true,
       "customCQPrompt": "",
       "customExtractPrompt": "",
       "defaultSystemChatPrompt": "",
@@ -107,10 +164,10 @@
       "inputPrice": 0,
       "outputPrice": 0,
       "censor": false,
-      "vision":
+      "vision": true,
       "datasetProcess": false,
       "toolChoice": true,
-      "functionCall":
+      "functionCall": true,
       "customCQPrompt": "",
       "customExtractPrompt": "",
       "defaultSystemChatPrompt": "",
@@ -128,7 +185,7 @@
       "vision": true,
       "datasetProcess": false,
       "toolChoice": true,
-      "functionCall":
+      "functionCall": true,
       "customCQPrompt": "",
       "customExtractPrompt": "",
       "defaultSystemChatPrompt": "",
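For reference, this is how the first of the newly added llmModels entries reads once the change is applied, assembled verbatim from the added lines in the first hunk (no fields beyond those shown in the diff):

{
  "model": "glm-3-turbo",
  "name": "glm-3-turbo",
  "maxContext": 16000,
  "maxResponse": 4000,
  "quoteMaxToken": 13000,
  "maxTemperature": 1.2,
  "inputPrice": 0,
  "outputPrice": 0,
  "censor": false,
  "vision": false,
  "datasetProcess": true,
  "toolChoice": true,
  "functionCall": false,
  "customCQPrompt": "",
  "customExtractPrompt": "",
  "defaultSystemChatPrompt": "",
  "defaultConfig": {}
}

The glm-4 and chatglm-pro entries added in the same hunk appear to differ only in their "model" and "name" values, and the gpt-3.5-turbo / gpt-4o-mini entries follow the same shape (with "charsPointsPrice" in place of the input/output prices). The remaining hunks only flip individual flags: "functionCall" is set to true in four existing entries and "vision" to true in one of them.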