File size: 1,891 Bytes
e9e927e
 
c6e8b1e
 
 
e9e927e
4e8425b
e9e927e
c6e8b1e
94300cf
c6e8b1e
d9c6f4b
c6e8b1e
 
0aa4ed1
c6e8b1e
 
0aa4ed1
e9e927e
0aa4ed1
 
 
 
 
2201a8b
0aa4ed1
e9e927e
 
 
c6e8b1e
0aa4ed1
94300cf
e9e927e
2201a8b
e9e927e
9b4627e
e9e927e
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
import gradio as gr
from huggingface_hub import InferenceClient
import os
from mistralai.client import MistralClient
from mistralai.models.chat_completion import ChatMessage

# Module-level Mistral API client.
# Requires the 'mistral_api_key' environment variable to be set (e.g. a
# HF Space secret); os.getenv returns None if missing, so authentication
# would then fail at request time rather than here.
client = MistralClient(api_key= os.getenv('mistral_api_key'))

# Function to get chat response
def get_mistral_response(user_input: str, history: list[tuple[str, str]], context: str):
    """Query the base and fine-tuned Mistral models with the same grounded
    question and return an HTML/Markdown string comparing both answers.

    Args:
        user_input: The question typed by the user in the chat box.
        history: Prior (user, assistant) turns supplied by gr.ChatInterface.
            Currently unused — history is not yet folded into the prompt.
        context: Free text pasted into the additional textbox; the answer is
            expected to be grounded in (and cite) this context.

    Returns:
        A formatted string showing the base model's answer followed by the
        fine-tuned model's answer highlighted in green.
    """
    # NOTE: no explicit system message is sent — the instruction prompt was
    # baked into the fine-tuned model during fine-tuning. (A previously
    # constructed but never-used `messages` list has been removed as dead code.)
    context_with_history = context  # placeholder until history is incorporated

    # Build the single-turn prompt once and reuse it for both model calls.
    prompt = [ChatMessage(role='user', content=f'''CONTEXT:{context_with_history}  QUESTION: {user_input}''')]

    # Answer from the fine-tuned checkpoint.
    response_after = client.chat(
        model='ft:open-mistral-7b:1c04df3c:20240629:7010a3c8',
        messages=prompt,
    ).choices[0].message.content

    # Answer from the untuned base model, for side-by-side comparison.
    response_before = client.chat(
        model='open-mistral-7b',
        messages=prompt,
    ).choices[0].message.content

    response = f"""**Before fine-tune**: <br> {response_before} <br><br> **After fine-tune**:<br><span style="color:green"> {response_after} </span><br>"""
    return response


# Context input shown below the chat, collapsed inside an (untitled) accordion.
_context_box = gr.Textbox(value="", label="Answer will be based on the context input here", lines=5)

# Chat UI: each turn calls get_mistral_response with the user message, the
# chat history, and the pasted context as an additional input.
demo = gr.ChatInterface(
    get_mistral_response,
    title='no-nonsense QA bot',
    description="After fine-tuning, the bot answers your question with a grounded citation. Paste your contextual information in the box below.",
    additional_inputs=[_context_box],
    additional_inputs_accordion=gr.Accordion(label="", open=False),
)


# Launch the Gradio app only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()