ryanrwatkins committed on
Commit
36b5d2d
Β·
1 Parent(s): aa25df8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -2
app.py CHANGED
@@ -6,6 +6,7 @@ import os
6
  import langchain
7
  import chromadb
8
  import glob
 
9
 
10
  from langchain.embeddings.openai import OpenAIEmbeddings
11
  from langchain.vectorstores import Chroma
@@ -25,6 +26,7 @@ from langchain.chains.question_answering import load_qa_chain
25
  # persist_directory="./embeddings" # Optional, defaults to .chromadb/ in the current directory
26
  #))
27
 
 
28
 
29
  def get_empty_state():
30
  return {"total_tokens": 0, "messages": []}
@@ -113,6 +115,7 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
113
  completion = RetrievalQA.from_chain_type(llm=ChatOpenAI(temperature=temperature, max_tokens=max_tokens, model_name="gpt-3.5-turbo"), chain_type="stuff", retriever=vectordb.as_retriever(), return_source_documents=True)
114
  query = str(system_prompt + history[-context_length*2:] + [prompt_msg])
115
  completion = completion({"query": query})
 
116
  # completion = completion({"question": query, "chat_history": history[-context_length*2:]})
117
 
118
 
@@ -121,8 +124,7 @@ def submit_message(prompt, prompt_template, temperature, max_tokens, context_len
121
 
122
  history.append(prompt_msg)
123
  #history.append(completion.choices[0].message.to_dict())
124
- #history.append(completion["result"].choices[0].message.to_dict())
125
- history.append("test")
126
 
127
  state['total_tokens'] += completion['usage']['total_tokens']
128
 
 
6
  import langchain
7
  import chromadb
8
  import glob
9
+ import logging
10
 
11
  from langchain.embeddings.openai import OpenAIEmbeddings
12
  from langchain.vectorstores import Chroma
 
26
  # persist_directory="./embeddings" # Optional, defaults to .chromadb/ in the current directory
27
  #))
28
 
29
+ logger=logging.getLogger()
30
 
31
  def get_empty_state():
32
  return {"total_tokens": 0, "messages": []}
 
115
  completion = RetrievalQA.from_chain_type(llm=ChatOpenAI(temperature=temperature, max_tokens=max_tokens, model_name="gpt-3.5-turbo"), chain_type="stuff", retriever=vectordb.as_retriever(), return_source_documents=True)
116
  query = str(system_prompt + history[-context_length*2:] + [prompt_msg])
117
  completion = completion({"query": query})
118
+ logger.info(completion["result"])
119
  # completion = completion({"question": query, "chat_history": history[-context_length*2:]})
120
 
121
 
 
124
 
125
  history.append(prompt_msg)
126
  #history.append(completion.choices[0].message.to_dict())
127
+ history.append(completion["result"].choices[0].message.to_dict())
 
128
 
129
  state['total_tokens'] += completion['usage']['total_tokens']
130