smakamali committed on
Commit
3c888d1
1 Parent(s): 210b573

improve generation configurations, disable temperature by default

Files changed (1)
  1. app.py +8 -7
app.py CHANGED
@@ -128,13 +128,14 @@ def summarize_text(title,text,temperature,words,use_api=False,api_token=None,do_
     model_kwargs1 = {"temperature":temperature ,
         "do_sample":do_sample,
         "min_new_tokens":200-25,
-        "max_new_tokens":200+25
+        "max_new_tokens":200+25,
+        'repetition_penalty':20.0
         }
     model_kwargs2 = {"temperature":temperature ,
         "do_sample":do_sample,
-        "min_new_tokens":words-25,
-        "max_new_tokens":words+25,
-        'repetition_penalty':2.0
+        "min_new_tokens":words,
+        "max_new_tokens":words+100,
+        'repetition_penalty':20.0
         }
     if not do_sample:
         del model_kwargs1["temperature"]
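Both kwargs dicts now carry a much stronger repetition_penalty (20.0), and model_kwargs2 targets a window of words to words+100 new tokens instead of words±25. The `if not do_sample:` guard shown as context already strips temperature when sampling is off, since greedy decoding ignores it. The diff does not show how these dicts are consumed; the following is only a minimal sketch, assuming a standard transformers text-generation pipeline with "gpt2" as a placeholder model name.

# Sketch only: how generation kwargs like these are typically forwarded to a
# transformers pipeline. The model name and the call itself are assumptions,
# not the app's actual wiring.
from transformers import pipeline

words = 150          # value from the "Length of the summary" slider (assumed)
do_sample = False    # sampling is now off by default
temperature = 0.25

model_kwargs2 = {"temperature": temperature,
                 "do_sample": do_sample,
                 "min_new_tokens": words,        # was words-25
                 "max_new_tokens": words + 100,  # was words+25
                 "repetition_penalty": 20.0}     # was 2.0
if not do_sample:
    # greedy decoding ignores temperature; dropping it avoids unused-parameter warnings
    del model_kwargs2["temperature"]

generator = pipeline("text-generation", model="gpt2")
out = generator("Summarize the following transcript: ...", **model_kwargs2)
print(out[0]["generated_text"])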
@@ -246,7 +247,7 @@ def summarize_text(title,text,temperature,words,use_api=False,api_token=None,do_
     PARTIAL SUMMARIES:\n
     `{doc_summaries}`\n
     ----------------------- \n
-    Generate an executive summary of the whole text in maximum {words} words that contains the main messages, points, and arguments presented in the video as bullet points.\n
+    Generate an executive summary of the whole text in maximum {words} words that contains the main messages, points, and arguments presented in the video as bullet points. Avoid duplications or redundant information. \n
     EXECUTIVE SUMMARY:\n
     """
     combine_prompt = PromptTemplate(
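The combine (reduce) prompt that merges the partial summaries gains an explicit instruction to avoid duplicated content. A minimal sketch of how such a template plausibly feeds the PromptTemplate constructed on the last context line; the template text is abbreviated to the placeholders visible in the hunk ({doc_summaries} and {words}), and the input_variables are inferred, not copied from the full source.

# Sketch of the combine prompt wiring (LangChain), under the assumptions above.
from langchain.prompts import PromptTemplate

combine_template = """PARTIAL SUMMARIES:
`{doc_summaries}`
-----------------------
Generate an executive summary of the whole text in maximum {words} words that
contains the main messages, points, and arguments presented in the video as
bullet points. Avoid duplications or redundant information.
EXECUTIVE SUMMARY:
"""

combine_prompt = PromptTemplate(template=combine_template,
                                input_variables=["doc_summaries", "words"])

print(combine_prompt.format(doc_summaries="- point A\n- point B", words=100))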
@@ -361,8 +362,8 @@ with gr.Blocks() as demo:
     with gr.Accordion("Summarization Settings",open=False):
         with gr.Row():
             # use_llm_api = gr.Checkbox(label="Summarize using the HuggingFaceHub API.",visible=True)
-            do_sample = gr.Checkbox(label="Set the Temperature",value=True,visible=True)
-            temperature = gr.Slider(minimum=0.01,maximum=1.0,value=0.25,label="Generation temperature",visible=True)
+            do_sample = gr.Checkbox(label="Set the Temperature",value=False,visible=True)
+            temperature = gr.Slider(minimum=0.01,maximum=1.0,value=0.25,label="Generation temperature",visible=False)
             words = gr.Slider(minimum=100,maximum=500,value=100,label="Length of the summary")
 
     gr.Markdown("# Results")
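In the Gradio settings accordion, the do_sample checkbox now defaults to False and the temperature slider is created hidden, which is what the commit message means by disabling temperature by default. The commit only changes those defaults; as a hypothetical follow-up (not part of this change), the checkbox could be wired to reveal the slider when sampling is enabled:

# Hypothetical extension, not in this commit: toggle the slider's visibility
# from the checkbox. Component names mirror the diff above.
import gradio as gr

with gr.Blocks() as demo:
    with gr.Accordion("Summarization Settings", open=False):
        with gr.Row():
            do_sample = gr.Checkbox(label="Set the Temperature", value=False, visible=True)
            temperature = gr.Slider(minimum=0.01, maximum=1.0, value=0.25,
                                    label="Generation temperature", visible=False)
            words = gr.Slider(minimum=100, maximum=500, value=100,
                              label="Length of the summary")
        # show the temperature slider only when sampling is turned on
        do_sample.change(lambda on: gr.update(visible=on),
                         inputs=do_sample, outputs=temperature)

demo.launch()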
 