import pandas as pd

context_data = pd.read_csv("drugs_side_effects_drugs_com.csv")

import os
from langchain_groq import ChatGroq

llm = ChatGroq(model="llama-3.1-70b-versatile", api_key=os.environ.get("GROQ_API_KEY"))

## Embedding model
from langchain_huggingface import HuggingFaceEmbeddings

embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")

# create vector store
from langchain_chroma import Chroma

vectorstore = Chroma(
    collection_name="medical_dataset_store",
    embedding_function=embed_model,
)

# add data to vector store:
# join each row's columns into a single text string before indexing --
# passing the DataFrame itself to add_texts would only index its column names
texts = context_data.astype(str).apply(lambda row: " ".join(row.values), axis=1).tolist()
vectorstore.add_texts(texts)

retriever = vectorstore.as_retriever()

from langchain_core.prompts import PromptTemplate

template = """You are a medical expert.
Use the provided context to answer the question.
If you don't know the answer, say so. Explain your answer in detail.
Do not discuss the context in your response; just provide the answer directly.

Context: {context}

Question: {question}

Answer:"""

rag_prompt = PromptTemplate.from_template(template)

from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough

rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | rag_prompt
    | llm
    | StrOutputParser()
)

import gradio as gr


def rag_memory_stream(message, history):
    # `history` is supplied by gr.ChatInterface but is not used here; the chain is stateless
    partial_text = ""
    for new_text in rag_chain.stream(message):
        partial_text += new_text
        yield partial_text


greetingsmessage = """Hello! Welcome to MediGuide ChatBot.
I'm here to provide you with quick and accurate information on medical drugs.
Whether you need details on usage, side effects, etc., feel free to ask.
Let's enhance patient care together!"""
initial_history = [("", greetingsmessage)]  # only used by the old gr.Interface version below

title = "MediGuide ChatBot"
description = greetingsmessage  # reuse the greeting text as the interface description
examples = [  # placeholder example questions; adjust them to your dataset
    "What are the side effects of aspirin?",
    "What is metformin used for?",
]

demo = gr.ChatInterface(
    fn=rag_memory_stream,
    type="messages",
    title=title,
    description=description,
    fill_height=True,
    examples=examples,
    theme="glass",
)

# Note: it should be gr.ChatInterface, not gr.Interface; I made a pull request for this change.
# Work on the greeting message can continue from here.
# demo = gr.Interface(
#     title=title,
#     fn=rag_memory_stream,
#     inputs=[
#         gr.Chatbot(value=initial_history, label="Chat History"), "text"
#     ],
#     outputs=gr.Chatbot(label="Chat History"),
#     allow_flagging="never",
#     fill_height=True,
#     theme="glass",
# )

if __name__ == "__main__":
    demo.launch()
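
# Optional sanity check (a minimal sketch, not part of the original script):
# before launching the UI, you can invoke the chain once to confirm that
# retrieval, the prompt, and the Groq call work end to end. The question below
# is a placeholder; use any drug that appears in the CSV.
#
#     answer = rag_chain.invoke("What are the common side effects of ibuprofen?")
#     print(answer)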