OsakanaTeishoku committed
Commit 76c15ee · verified · 1 Parent(s): daa0894

Update README.md

Files changed (1): README.md +2 -2
README.md CHANGED
@@ -100,10 +100,10 @@ messages_list = [
 prompts = [line[0]["content"] for line in messages_list]
 prompt_token_ids = [tokenizer.apply_chat_template(messages, add_generation_prompt=True) for messages in messages_list]
 sampling_params = vllm.SamplingParams(
-    temperature=1.5,
+    temperature=0.7,
     max_tokens=1024,
     repetition_penalty=1.05,
-    min_p=0.1,
+    top_p=0.9,
 )
 outputs = llm.generate(
     prompt_token_ids=prompt_token_ids,
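
For context, a minimal sketch of how the updated sampling parameters fit into the surrounding generation snippet. The model ID and example message below are placeholders, not part of this commit; the prompt_token_ids keyword follows the README's vLLM usage, and newer vLLM releases may expect TokensPrompt inputs instead.

# Minimal sketch; MODEL_ID and the example message are placeholders.
import vllm
from transformers import AutoTokenizer

MODEL_ID = "path/to/model"  # placeholder: substitute the repo this README belongs to

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
llm = vllm.LLM(model=MODEL_ID)

messages_list = [
    [{"role": "user", "content": "Hello!"}],  # placeholder prompt
]

prompts = [line[0]["content"] for line in messages_list]
prompt_token_ids = [
    tokenizer.apply_chat_template(messages, add_generation_prompt=True)
    for messages in messages_list
]

sampling_params = vllm.SamplingParams(
    temperature=0.7,          # lowered from 1.5 in this commit
    max_tokens=1024,
    repetition_penalty=1.05,
    top_p=0.9,                # replaces min_p=0.1
)

outputs = llm.generate(
    prompt_token_ids=prompt_token_ids,
    sampling_params=sampling_params,
)
for prompt, output in zip(prompts, outputs):
    print(prompt, "->", output.outputs[0].text)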