rahgadda committed (verified)
Commit df9fbb6 · 1 Parent(s): 2e74bbe

Initial Draft

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -228,7 +228,7 @@ def fn_generate_QnA_response(mv_selected_model, mv_user_question, lv_vector_stor
     )
     print("Formatted Prompt - " + lv_qa_formatted_prompt)
 
-    lv_llm_response = lv_model(lv_qa_formatted_prompt).text
+    lv_llm_response = lv_model(lv_qa_formatted_prompt)
     # print("LLM Response" +lv_llm_response)
 
     print("Step5: LLM response generated")
@@ -280,7 +280,7 @@ def fn_generate_API_QnA_response(mv_selected_model, mv_user_question, lv_vector_
     print("Step4: Generating LLM response")
     fn_display_user_messages("Step4: Generating LLM response","Info", mv_processing_message)
 
-    lv_llm_response = lv_model.generate_content(lv_qa_prompt)
+    lv_llm_response = lv_model.generate_content(lv_qa_formatted_prompt).text
 
     print("Step5: LLM response generated")
    fn_display_user_messages("Step5: LLM response generated","Info", mv_processing_message)
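For context on the first hunk: dropping .text suggests the local lv_model is an LLM wrapper whose direct call already returns a plain string. A minimal sketch under that assumption, using LangChain's LlamaCpp purely as an illustrative stand-in; the actual model class and model path used by app.py are not shown in this diff.

from langchain_community.llms import LlamaCpp

# Hypothetical model path; the real one is not part of this diff.
lv_model = LlamaCpp(model_path="models/llama-2-7b.Q4_K_M.gguf")

# Calling the wrapper directly returns a plain Python string, so no .text
# attribute is needed, consistent with the commit removing it.
# (Newer LangChain versions prefer lv_model.invoke(...), which also returns str.)
lv_llm_response = lv_model("What is Retrieval-Augmented Generation?")
print(lv_llm_response)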
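The second hunk matches the google.generativeai client, where GenerativeModel.generate_content() returns a response object and the generated string lives on its .text property. A minimal sketch assuming that client; the API key handling and model name below are illustrative, not taken from app.py.

import os
import google.generativeai as genai

# Illustrative key handling; app.py's actual configuration is not in this diff.
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])

lv_model = genai.GenerativeModel("gemini-pro")

# generate_content() returns a GenerateContentResponse; .text holds the
# generated string, which is why the commit appends .text to the call.
lv_llm_response = lv_model.generate_content("What is Retrieval-Augmented Generation?").text
print(lv_llm_response)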