File size: 2,584 Bytes
26b1c4d
fb14459
26b1c4d
fb14459
26b1c4d
 
fb14459
d4d3968
fb14459
26b1c4d
 
 
fb14459
26b1c4d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7c782af
26b1c4d
7c782af
26b1c4d
 
 
9a5d4f4
 
 
d03a317
26b1c4d
 
bbd60cc
723330b
 
 
 
 
 
 
fb14459
 
723330b
 
 
 
 
 
 
 
 
 
 
 
 
fb14459
 
 
26b1c4d
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
import pandas as pd

# Reference data for retrieval: drug names, uses, and side effects
# scraped from drugs.com (schema assumed from filename — TODO confirm columns).
context_data = pd.read_csv("drugs_side_effects_drugs_com.csv")

import os
from langchain_groq import ChatGroq

# Groq-hosted Llama model; API key comes from the environment (GROQ_API_KEY).
llm = ChatGroq(model="llama-3.1-70b-versatile", api_key=os.environ.get("GROQ_API_KEY"))

## Embedding model!
from langchain_huggingface import HuggingFaceEmbeddings
embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")

# create vector store!
from langchain_chroma import Chroma

vectorstore = Chroma(
    collection_name="medical_dataset_store",
    embedding_function=embed_model,
)

# BUG FIX: iterating a DataFrame yields its COLUMN NAMES, so the original
# `vectorstore.add_texts(context_data)` indexed only the header strings.
# Serialize each row to "column: value" text so the actual records are embedded.
row_texts = [
    "; ".join(f"{col}: {val}" for col, val in row.items())
    for row in context_data.fillna("").astype(str).to_dict(orient="records")
]
vectorstore.add_texts(row_texts)

retriever = vectorstore.as_retriever()

from langchain_core.prompts import PromptTemplate

# Instruction template: answer only from the retrieved context, in detail,
# without meta-commentary about the context itself.
template = ("""You are a medical expert.
    Use the provided context to answer the question.
    If you don't know the answer, say so. Explain your answer in detail.
    Do not discuss the context in your response; just provide the answer directly.
    Context: {context}
    Question: {question}
    Answer:""")

rag_prompt = PromptTemplate.from_template(template)

from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough

# LCEL pipeline: the dict fans out the incoming question — the retriever
# fetches {context}, RunnablePassthrough forwards the question verbatim —
# then the prompt is filled, the LLM is called, and the reply is reduced
# to a plain string.
chain_inputs = {"context": retriever, "question": RunnablePassthrough()}
rag_chain = chain_inputs | rag_prompt | llm | StrOutputParser()
import gradio as gr

def rag_memory_stream(message, history):
    """Stream the chain's answer to Gradio, growing the text chunk by chunk."""
    pieces = []
    for chunk in rag_chain.stream(message):
        pieces.append(chunk)
        # Yield the full text accumulated so far, as ChatInterface expects.
        yield "".join(pieces)

# Welcome text shown to the user when the chat opens.
greetingsmessage = """Hello! Welcome to MediGuide ChatBot. I'm here to provide you with quick and accurate information on medical drugs. 
Whether you need details on usage, side effects , etc feel free to ask. Let's enhance patient care together!"""

# Seed history pairing an empty user turn with the greeting.
# NOTE(review): nothing in the active code path reads this — it is only
# referenced by the commented-out gr.Interface variant; confirm before removing.
initial_history = [("", greetingsmessage)]

# Title displayed at the top of the Gradio chat UI.
title = "MediGuide ChatBot"

# BUG FIX: `description` and `examples` were passed to ChatInterface but never
# defined anywhere in the file, so the script crashed with NameError on import.
# Define both here; the commented-out legacy gr.Interface variant was removed.
description = """Hello! Welcome to MediGuide ChatBot. I'm here to provide you with quick and accurate information on medical drugs. 
Whether you need details on usage, side effects , etc feel free to ask. Let's enhance patient care together!"""

# Clickable starter prompts shown beneath the chat input.
examples = [
    "What are the side effects of aspirin?",
    "What is metformin used for?",
    "Can ibuprofen interact with other drugs?",
]

# Streaming chat UI: `rag_memory_stream` yields progressively longer answers.
demo = gr.ChatInterface(fn=rag_memory_stream,
                        type="messages",
                        title=title,
                        description=description,
                        fill_height=True,
                        examples=examples,
                        theme="glass",
)


if __name__ == "__main__":
    demo.launch()