import os
import openai
from pathlib import Path
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.indexes import VectorstoreIndexCreator
from langchain.document_loaders import TextLoader
from langchain.chat_models import ChatOpenAI
import gradio as gr


def index_txt(directory):
    """Build a vector store index over all .txt files in the given directory."""
    files = directory.glob("*.txt")
    loaders = [TextLoader(str(file)) for file in files]
    return VectorstoreIndexCreator().from_loaders(loaders)


def vector_search(natural_lang_query):
    """Query the global index and return the answer along with its cited sources."""
    llm = ChatOpenAI(temperature=0, model_name="gpt-4")
    query_result = index.query_with_sources(natural_lang_query, llm=llm)
    final_result = "Answer: " + query_result["answer"]
    final_result += f"\n Sources: {query_result['sources']}"
    return final_result


def create_gradio_interface(title, description):
    """Create a Gradio interface with a single text input and a single text output."""
    interface = gr.Interface(
        fn=vector_search,
        inputs=[gr.Textbox(label="What would you like to ask your data?")],
        outputs=gr.Textbox(label="Results"),
        title=title,
        description=description,
    )
    return interface


# Directory containing the source documents
output_dir = Path("docs/")

# Build the index over the documents, then launch the interface
index = index_txt(output_dir)
interface = create_gradio_interface(
    title="ChatBot - Question answering across Pfizer Comirnaty Documents",
    description=(
        "Semantic search: Enter a query to receive an answer with cited source documents.\n\n"
        "DISCLAIMER: This is an early alpha product and not intended for production use."
    ),
)
interface.launch()