import logging
import os
from time import time

import openai
# BUG FIX: the original did `import streamlit` but referenced `st.secrets`,
# which raises NameError at import time. Bind the conventional `st` alias.
import streamlit as st

# Pull the API key from Streamlit's secrets store so it never lives in the repo.
openai.api_key = st.secrets["openai_api_key"]


def _log_interaction(log_dir, prompt, response):
    """Persist a prompt/response pair to a timestamped file under *log_dir*.

    The directory is created on first use (exist_ok avoids the check-then-create
    race of the original code). Files are named by the current Unix timestamp,
    so successive calls do not collide within clock resolution.
    """
    os.makedirs(log_dir, exist_ok=True)
    filename = '%s_gpt3.txt' % time()
    # Save the whole prompt and the response so that we can inspect it when necessary.
    with open(os.path.join(log_dir, filename), 'w', encoding="utf-8") as outfile:
        outfile.write('PROMPT:\n\n' + prompt +
                      '\n\n###############\n\nRESPONSE:\n\n' + response)


def gpt_rephrase(fact):
    """Rephrase *fact* as a PubMed search query via the GPT-3 completion API.

    Args:
        fact: Natural-language statement to turn into a PubMed query.

    Returns:
        The generated query string, or None if the OpenAI call fails
        (the error is logged rather than raised).
    """
    # Dynamically generate the prompt to rephrase the fact as a PubMed query.
    prompt = ("Rephrase the following fact as a Pubmed search query.\n"
              f"FACT: {fact}\n"
              "PUBMED QUERY:")
    try:
        completion = openai.Completion.create(
            model="text-ada-001",
            prompt=prompt,
            max_tokens=250,
            temperature=0,  # deterministic output for reproducible queries
        )
        response = completion['choices'][0]['text'].strip()
        _log_interaction('gpt3_rephrase_logs', prompt, response)
        return response
    except Exception as e:
        logging.error("Error communicating with OpenAI (rephrase): ", exc_info=e)
        return None


def check_fact(evidence, fact):
    """Classify *fact* against *evidence* as True, False or Undetermined.

    Args:
        evidence: Source text (e.g. a PubMed abstract/conclusion) to judge against.
        fact: The hypothesis being checked.

    Returns:
        The model's one-word verdict with any trailing period stripped,
        or None if the OpenAI call fails (the error is logged rather than raised).
    """
    # Dynamically generate the prompt to check the fact against the given
    # PubMed article conclusion/abstract.
    prompt = ("Based exclusively on the evidence provided, is the following "
              "hypothesis True, False or Undetermined?\n"
              f"EVIDENCE: {evidence}\n"
              f"HYPOTHESIS: {fact}\n"
              "ANSWER:")
    try:
        completion = openai.Completion.create(
            model="text-ada-001",
            prompt=prompt,
            max_tokens=2,  # a single-word verdict is all we need
            temperature=0,
        )
        response = completion['choices'][0]['text'].strip()
        # Normalize "True." -> "True" etc. so callers can compare directly.
        response = response.replace('.', '')
        _log_interaction('gpt3_factchecking_logs', prompt, response)
        return response
    except Exception as e:
        logging.error("Error communicating with OpenAI (check_fact): ", exc_info=e)
        return None


def gpt35_rephrase(fact):
    """Rephrase *fact* as a PubMed search query using GPT-3.5-turbo.

    Same contract as gpt_rephrase, but uses the chat completion endpoint,
    which is lower cost than the GPT-3.5 completion models.

    Args:
        fact: Natural-language statement to turn into a PubMed query.

    Returns:
        The generated query string, or None if the OpenAI call fails
        (the error is logged rather than raised).
    """
    prompt = ("Rephrase the following fact as a Pubmed search query.\n"
              f"FACT: {fact}\n"
              "PUBMED QUERY:")
    try:
        completion = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "user", "content": prompt}
            ]
        )
        response = completion['choices'][0]['message']['content'].strip()
        _log_interaction('gpt35_rephrase_logs', prompt, response)
        return response
    except Exception as e:
        logging.error("Error communicating with OpenAI (gpt35_rephrase): ", exc_info=e)
        return None