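"""HugChat x Metaphor Streamlit chat app.

Logs into Hugging Face via hugchat, augments each user prompt with results
from the Metaphor search API, and renders the conversation in a Streamlit
chat UI.

Assumed (not pinned in this file) third-party dependencies, inferred from the
imports below: streamlit, hugchat, metaphor-python.
"""
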
import streamlit as st
from hugchat import hugchat
from hugchat.login import Login
from metaphor_python import Metaphor


# App title
st.set_page_config(page_title="HugChat with Metaphor")

# Define Metaphor API key
METAPHOR_API_KEY = "YOUR_METAPHOR_API_KEY"  # Replace with your own Metaphor API key
with st.sidebar:
    st.title('πŸ€—πŸ’¬ HugChat x Metaphor')
    if ('EMAIL' in st.secrets) and ('PASS' in st.secrets):
        st.success('HuggingFace Login credentials already provided!', icon='βœ…')
        hf_email = st.secrets['EMAIL']
        hf_pass = st.secrets['PASS']
    else:
        hf_email = st.text_input('Enter E-mail:', type='password')
        hf_pass = st.text_input('Enter password:', type='password')
        if not (hf_email and hf_pass):
            st.warning('Please enter your credentials!', icon='⚠️')
        else:
            st.success('Proceed to entering your prompt message!', icon='πŸ‘‰')
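
# Hedged example: the EMAIL/PASS lookups above assume a Streamlit secrets file
# (.streamlit/secrets.toml) with entries along these lines (placeholder values):
#
#   EMAIL = "you@example.com"
#   PASS = "your-hugging-face-password"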

# Create Metaphor client
metaphor = Metaphor(METAPHOR_API_KEY)

# Store LLM generated responses
if "messages" not in st.session_state:
    st.session_state.messages = [{"role": "assistant", "content": "Hi, I'm the Metaphor bot. How may I assist you?"}]

# Display or clear chat messages
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])

def clear_chat_history():
    st.session_state.messages = [{"role": "assistant", "content": "Hi, I'm the Metaphor bot. How may I assist you?"}]
st.sidebar.button('Clear Chat History', on_click=clear_chat_history)

# Function for generating LLM response
def generate_response(prompt_input, email, passwd):
    # Hugging Face Login
    sign = Login(email, passwd)
    cookies = sign.login()

    # Create ChatBot
    chatbot = hugchat.ChatBot(cookies=cookies.get_dict())

    # Check if the user's input is a specific question
    if prompt_input.strip().lower() in ["who are you?", "who made you?"]:
        response = "I am a LLaMA-based HugChat assistant from Hugging Face, integrated with Metaphor on the backend."
    else:
        # Fetch Metaphor search results
        search_options = {
            "query": prompt_input,
            "num_results": 5  # You can adjust the number of results as needed
        }
        try:
            search_response = metaphor.search(**search_options)

            # Extract links and summaries from the Metaphor search results
            links_and_summaries = [
                f"Title: {result.title}\nURL: {result.url}\nSummary: {result.extract}\n---"
                for result in search_response.results
            ]
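            # Note (assumption): depending on the metaphor_python version,
            # result.extract may be empty unless page contents are fetched
            # separately (e.g. with the client's get_contents method).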

            # Combine the prior conversation (which already includes the
            # latest user prompt appended below) with the Metaphor output
            string_dialogue = "You are a helpful assistant.\n\n"
            for dict_message in st.session_state.messages:
                if dict_message["role"] == "user":
                    string_dialogue += "User: " + dict_message["content"] + "\n\n"
                else:
                    string_dialogue += "Assistant: " + dict_message["content"] + "\n\n"

            search_context = "\n".join(links_and_summaries)
            prompt = f"{string_dialogue}{search_context}\nAssistant: "
            response = chatbot.chat(prompt)
        except Exception as e:
            response = str(e)

    return response


# User-provided prompt
if prompt := st.chat_input(disabled=not (hf_email and hf_pass)):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.write(prompt)

    # Generate a new response if the last message is not from the assistant
    if st.session_state.messages[-1]["role"] != "assistant":
        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                response = generate_response(prompt, hf_email, hf_pass)
                st.write(response)

        message = {"role": "assistant", "content": response}
        st.session_state.messages.append(message)
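
# To try this locally (assuming the file is saved as app.py, a hypothetical
# name), launch it with Streamlit:
#   streamlit run app.py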