debug fix
metric.py CHANGED
@@ -67,7 +67,7 @@ def chat_llm_batch(model_id, prompts, limit= 20):
     for response in client.text.generation.create(
         model_id = model_id,
         inputs = prompts, # here each prompt is the concatenation of system prompt and user prompt
-        execution_options=CreateExecutionOptions(concurrency_limit=limit, ordered=
+        execution_options=CreateExecutionOptions(concurrency_limit=limit, ordered=True),
         parameters=parameters,
     ):
         response_list.append(response.results[0].generated_text)
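The removed line was truncated after `ordered=`, leaving an unclosed `CreateExecutionOptions(` call and therefore a syntax error; the fix completes the keyword argument as `ordered=True`, so batched results stream back in the same order as the input prompts. For context, here is a minimal sketch of the helper around this call, assuming the IBM `ibm-generative-ai` (genai) SDK; the imports, the `Credentials.from_env()` setup, and the `parameters` object are assumptions, since the diff shows only the loop itself:

```python
# Sketch only: assumes the IBM `ibm-generative-ai` (genai) SDK with
# credentials supplied via environment variables (e.g. GENAI_KEY).
# Import paths may differ across SDK versions.
from genai import Client, Credentials
from genai.schema import TextGenerationParameters
from genai.text.generation import CreateExecutionOptions

client = Client(credentials=Credentials.from_env())

# Assumed decoding parameters; the diff does not show how `parameters`
# is actually constructed in metric.py.
parameters = TextGenerationParameters(max_new_tokens=256)

def chat_llm_batch(model_id, prompts, limit=20):
    response_list = []
    # `concurrency_limit` caps the number of in-flight requests, and
    # `ordered=True` (the fix) guarantees responses arrive in the same
    # order as `prompts`, so they can be matched back to their inputs.
    for response in client.text.generation.create(
        model_id=model_id,
        inputs=prompts,  # each prompt is system prompt + user prompt concatenated
        execution_options=CreateExecutionOptions(concurrency_limit=limit, ordered=True),
        parameters=parameters,
    ):
        response_list.append(response.results[0].generated_text)
    return response_list
```

With `ordered=True`, the i-th entry of `response_list` corresponds to `prompts[i]`, which matters wherever the caller pairs generated texts back with their prompts (e.g. for scoring in a metric).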