devve1 committed on
Commit 302780d
1 Parent(s): 5c1de7a

Update app.py

Files changed (1)
  1. app.py +31 -32
app.py CHANGED
@@ -183,45 +183,44 @@ def self_knowledge(query: str):
     """
 
 def main(query: str, client: QdrantClient, collection_name: str, llm, dense_model: OptimumEncoder, sparse_model: SparseTextEmbedding, past_messages: str):
-    prompt = build_initial_prompt(query)
-    gen_text = outlines.generate.text(llm)
+    scored_points = query_hybrid_search(query, client, collection_name, dense_model, sparse_model).points
+
+    docs = [(scored_point.payload['text'], scored_point.payload['metadata']) for scored_point in scored_points]
+    contents, metadatas = [list(t) for t in zip(*docs)]
+
+    context = "\n".join(contents)
 
-    gen_choice = outlines.generate.choice(llm, choices=['Domain-Specific Question', 'General Question'])
+    gen_text = outlines.generate.text(llm)
+
+    gen_choice = outlines.generate.choice(llm, choices=['Yes', 'No'])
+    prompt = route_llm(context, 'Is the context relevant to the question ?')
     action = gen_choice(prompt, max_tokens=128, sampling_params=SamplingParams(temperature=0))
     print(f'Choice: {action}')
 
-    if action == 'General Question':
-        prompt = open_query_prompt(past_messages, query)
-        answer = gen_text(prompt, max_tokens=300, sampling_params=SamplingParams(temperature=0.3))
+    if action == 'Yes':
+        seen_values = set()
+        result_metadatas = "\n\n".join(
+            f'{value}'
+            for metadata in metadatas
+            for key, value in metadata.items()
+            if (value not in seen_values and not seen_values.add(value))
+        )
+
+        prompt = answer_with_context(context, query)
+        answer = gen_text(prompt, max_tokens=500, sampling_params=SamplingParams(temperature=0.3))
+        answer = f"{answer}\n\n\nSource(s) :\n\n{result_metadatas}"
+
+        if not st.session_state.documents_only:
+            answer = f'Documents Based :\n\n{answer}'
     else:
-        scored_points = query_hybrid_search(query, client, collection_name, dense_model, sparse_model).points
-
-        docs = [(scored_point.payload['text'], scored_point.payload['metadata']) for scored_point in scored_points]
-        contents, metadatas = [list(t) for t in zip(*docs)]
-
-        context = "\n".join(contents)
-
        torch.cuda.empty_cache()
-        print('HELLO')
-        gen_choice = outlines.generate.choice(llm, choices=['Yes', 'No'])
-        prompt = route_llm(context, 'Is the context relevant to the question ?')
+        gen_choice = outlines.generate.choice(llm, choices=['Domain-Specific Question', 'General Question'])
+        prompt = build_initial_prompt(query)
        action = gen_choice(prompt, max_tokens=128, sampling_params=SamplingParams(temperature=0))
-        print('HELLO3')
-        if action == 'Yes':
-            seen_values = set()
-            result_metadatas = "\n\n".join(
-                f'{value}'
-                for metadata in metadatas
-                for key, value in metadata.items()
-                if (value not in seen_values and not seen_values.add(value))
-            )
-
-            prompt = answer_with_context(context, query)
-            answer = gen_text(prompt, max_tokens=500, sampling_params=SamplingParams(temperature=0.3))
-            answer = f"{answer}\n\n\nSource(s) :\n\n{result_metadatas}"
-
-            if not st.session_state.documents_only:
-                answer = f'Documents Based :\n\n{answer}'
+
+        if action == 'General Question':
+            prompt = open_query_prompt(past_messages, query)
+            answer = gen_text(prompt, max_tokens=300, sampling_params=SamplingParams(temperature=0.3))
         else:
             if st.session_state.documents_only:
                 prompt = idk(query)
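
Note on the retrieval block that now opens main(): each Qdrant hit's 'text' payload is paired with its 'metadata', and zip(*docs) transposes those pairs into two parallel lists. A toy reproduction with stubbed payloads (no Qdrant client needed; the payload dicts below are invented, only the unpacking idiom matches app.py):

# Toy version of the payload unpacking at the top of the new main();
# the dicts stand in for Qdrant ScoredPoint.payload.
scored_payloads = [
    {'text': 'chunk one', 'metadata': {'source': 'doc_a.pdf'}},
    {'text': 'chunk two', 'metadata': {'source': 'doc_b.pdf'}},
]

docs = [(p['text'], p['metadata']) for p in scored_payloads]
# zip(*docs) transposes the list of pairs: all texts first, all metadata second.
contents, metadatas = [list(t) for t in zip(*docs)]

context = "\n".join(contents)  # concatenated passages for the prompt
print(context)
print(metadatas)

As in app.py, the two-target unpack assumes at least one hit: with an empty docs, zip(*docs) yields nothing and the assignment raises ValueError.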
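The reordered flow then gates answering on a constrained yes/no call before ever classifying the question type. A minimal sketch of that gate, assuming the same outlines + vLLM stack app.py already uses; the llm handle is elided and the prompt wording here is a hypothetical stand-in for route_llm, not taken from the file:

import outlines
from vllm import SamplingParams

def is_context_relevant(llm, context: str, question: str) -> bool:
    # Hypothetical stand-in for app.py's route_llm(context, question).
    prompt = f"Context:\n{context}\n\nQuestion: {question}\nIs the context relevant to the question?"
    # generate.choice constrains decoding to exactly one of the listed
    # strings, so the result can be compared with == and never needs parsing.
    gen_choice = outlines.generate.choice(llm, choices=['Yes', 'No'])
    answer = gen_choice(prompt, max_tokens=128,
                        sampling_params=SamplingParams(temperature=0))
    return answer == 'Yes'

temperature=0, as in the diff, keeps the routing deterministic for a given prompt.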
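Finally, the result_metadatas comprehension (moved unchanged into the new 'Yes' branch) relies on a compact first-seen dedup trick: set.add() returns None, which is falsy, so the second clause of the filter always passes while recording the value. A standalone demonstration with made-up metadata:

# Standalone demo of the first-seen dedup used to build result_metadatas.
# The metadata values are invented; only the idiom matches app.py.
metadatas = [
    {'source': 'doc_a.pdf', 'page': 1},
    {'source': 'doc_a.pdf', 'page': 2},
    {'source': 'doc_b.pdf', 'page': 1},
]

seen_values = set()
result_metadatas = "\n\n".join(
    f'{value}'
    for metadata in metadatas
    for key, value in metadata.items()
    # set.add() returns None (falsy), so this clause is True exactly when
    # the value is new: it filters and records in one pass, preserving
    # first-seen order.
    if (value not in seen_values and not seen_values.add(value))
)

print(result_metadatas)  # doc_a.pdf, 1, 2, doc_b.pdf -- each value once

Note the dedup is over values across all keys, so a page number already seen under one document is dropped for every later document as well.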