arabellastrange committed · Commit a7a15ae · 1 Parent(s): 9d67042
editing conversational history use

generate_response.py CHANGED (+8 -4)
@@ -46,10 +46,12 @@ def generate_query_response(index, message):
     string_output = ""
 
     logger.info("Creating query engine with index...")
+
     query_engine = index.as_query_engine(streaming=True, chat_mode=ChatMode.CONDENSE_QUESTION)
 
     logger.info(f'Input user message: {message}')
-    response = query_engine.query(
+    response = query_engine.query(f"Write a comprehensive but concise response to this query, if conversation history "
+                                  f"is irrelevant to this query, ignore conversation history: \n '{message}'")
 
     response_text = []
     for text in response.response_gen:
@@ -75,7 +77,8 @@ def generate_chat_response_with_history(message, history):
 
 def generate_chat_response_with_history_rag_return_response(index, message, history):
     logger.info("Generating chat response with history and rag...")
-
+    message = (f"Write a comprehensive but concise response to this query, if conversation history is irrelevant to "
+               f"this query, ignore conversation history: \n '{message}'")
     messages = collect_history(message, history)
 
     logger.info("Creating query engine with index...")
@@ -87,7 +90,8 @@ def generate_chat_response_with_history_rag_return_response(index, message, hist
 def generate_chat_response_with_history_rag_yield_string(index, message, history):
     logger.info("Generating chat response with history and rag...")
     string_output = ""
-
+    message = (f"Write a comprehensive but concise response to this query, if conversation history is irrelevant to "
+               f"this query, ignore conversation history: \n '{message}'")
    messages = collect_history(message, history)
 
     logger.info("Creating query engine with index...")
@@ -155,7 +159,7 @@ def google_question(message, history):
 DEFAULT_TEMPLATE = """\
 Given a conversation (between Human and Assistant) and a follow up message from Human, \
 rewrite the message to be a standalone web engine search query that captures all relevant context \
-from the conversation in keywords.
+from the conversation in keywords if the previous conversation is relevant to the new message.
 
 <Chat History>
 {chat_history}
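
For context on how the wrapped prompt is consumed: the first hunk is cut off right after "for text in response.response_gen:", so below is a minimal sketch of how the streaming loop in generate_query_response presumably continues. It assumes a LlamaIndex query engine created with streaming=True, whose .query(...) returns a streaming response exposing .response_gen; the helper name stream_query_response and the generator-style yielding are illustrative, not taken from the repository.

# Minimal sketch only -- not the repository's actual implementation.
# Assumes a LlamaIndex query engine built with index.as_query_engine(streaming=True, ...),
# whose .query(...) result exposes .response_gen (a generator of text chunks).

def stream_query_response(query_engine, message):
    """Hypothetical helper mirroring generate_query_response past the truncated hunk."""
    # Same prompt wrapper that this commit adds around the user message.
    response = query_engine.query(
        f"Write a comprehensive but concise response to this query, if conversation history "
        f"is irrelevant to this query, ignore conversation history: \n '{message}'"
    )

    response_text = []
    string_output = ""
    for text in response.response_gen:
        # Accumulate streamed chunks and yield the running answer so a chat UI
        # can render the response incrementally.
        response_text.append(text)
        string_output = "".join(response_text)
        yield string_output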