srinuksv committed on
Commit 17bad79 · verified · 1 Parent(s): c0d2409

Update app.py

Files changed (1)
  1. app.py +20 -29
app.py CHANGED
@@ -26,33 +26,27 @@ llm_client = InferenceClient(
     model=repo_id,
     token=os.getenv("HF_TOKEN"),
 )
-def summarize_conversation(inference_client: InferenceClient, history: list):
-    # Construct the full prompt with history
+def summarize_conversation(history: list):
+    # Construct the conversation history text
     history_text = "\n".join([f"{entry['sender']}: {entry['message']}" for entry in history])
-    full_prompt = f"{history_text}\n\nSummarize the conversation in three concise points only give me only Summarization in python list formate :\n"
-
-    response = inference_client.post(
-        json={
-            "inputs": full_prompt,
-            "parameters": {"max_new_tokens": 512},
-            "task": "text-generation",
-        },
+
+    # Create a client instance
+    client = Client("vilarin/Llama-3.1-8B-Instruct")
+
+    # Predict the summary using the new parameters
+    result = client.predict(
+        message=history_text,
+        system_prompt="tell me what user interest using bellow conversation give me answer within 20 to 60 words",
+        temperature=0.8,
+        max_new_tokens=1024,
+        top_p=1,
+        top_k=20,
+        penalty=1.2,
+        api_name="/chat"
     )
 
-    # Decode the response
-    generated_text = json.loads(response.decode())[0]["generated_text"]
-
-    # Use regex to extract the list inside brackets
-    matches = re.findall(r'\[(.*?)\]', generated_text)
-
-    # If matches found, extract the content
-    if matches:
-        # Assuming we only want the first match, split by commas and strip whitespace
-        list_items = matches[0].split(',')
-        cleaned_list = [item.strip() for item in list_items]
-        return cleaned_list
-    else:
-        return generated_text
+    # Decode the result
+    generated_text = result
 
 
 os.environ["HF_TOKEN"] = os.getenv("HF_TOKEN")
@@ -192,12 +186,9 @@ async def save_chat_history(history: dict):
     print(user_id)
     if 'history' in history and isinstance(history['history'], list):
         print("Received history:", history['history'])  # Debugging line
-        cleaned_summary = summarize_conversation(llm_client, history['history'])
+        cleaned_summary = summarize_conversation(history['history'])
         print("Cleaned summary:", cleaned_summary)  # Debugging line
-        cleaned_summary = cleaned_summary.strip(" []'")
-        cleaned_summary_list = [item.strip(" '") for item in cleaned_summary.split(',')]
-        cleaned_summary_str = ' '.join(cleaned_summary_list)
-        sf.Lead.update(user_id,{'Description': cleaned_summary_str})
+        sf.Lead.update(user_id,{'Description': cleaned_summary})
         return {"summary": cleaned_summary, "message": "Chat history saved"}
     else:
         return JSONResponse(status_code=400, content={"message": "Invalid history format"})
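
For reference, a minimal standalone sketch of the gradio_client call path this commit switches to. It is a sketch under assumptions, not the app's exact code: it assumes the gradio_client package is installed, the public vilarin/Llama-3.1-8B-Instruct Space is reachable and exposes the /chat endpoint with these parameters, and the sample history entries are hypothetical.

# Sketch only: exercises the refactored summarize_conversation() in isolation.
# Assumes: pip install gradio_client; the Space "vilarin/Llama-3.1-8B-Instruct" is up.
from gradio_client import Client

def summarize_conversation(history: list):
    # Flatten the chat history into "sender: message" lines
    history_text = "\n".join(f"{entry['sender']}: {entry['message']}" for entry in history)
    # Call the Space's /chat endpoint with the same parameters as the commit
    client = Client("vilarin/Llama-3.1-8B-Instruct")
    return client.predict(
        message=history_text,
        system_prompt="tell me what user interest using bellow conversation give me answer within 20 to 60 words",
        temperature=0.8,
        max_new_tokens=1024,
        top_p=1,
        top_k=20,
        penalty=1.2,
        api_name="/chat",
    )

if __name__ == "__main__":
    # Hypothetical history in the shape the endpoint receives
    sample_history = [
        {"sender": "user", "message": "Do you offer cloud migration services?"},
        {"sender": "bot", "message": "Yes, we handle AWS and Azure migrations."},
    ]
    print(summarize_conversation(sample_history))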