File size: 6,231 Bytes
1b846eb
 
d92c861
1b846eb
847bc9e
1bd9947
fdf3b37
 
 
 
1bd9947
 
 
 
1e7346f
572cc27
5b102e3
6022fe1
79654d4
 
5b102e3
572cc27
d92c861
1b846eb
d92c861
 
 
 
 
 
 
 
5b102e3
 
fdf3b37
3ba2dcf
5b102e3
1556bea
 
 
 
13ac926
 
a14c65f
13ac926
66e97f3
 
 
6022fe1
fed63e7
13ac926
572cc27
3ba2dcf
1556bea
81dbe97
 
 
 
53a7825
81dbe97
 
 
 
 
 
 
 
 
 
 
 
 
1556bea
 
 
 
 
 
 
 
 
 
 
 
 
 
622a2f4
1556bea
 
a14c65f
1556bea
 
 
 
 
 
 
 
 
 
 
 
c4eb94d
1556bea
 
 
c4eb94d
82863bb
3ba2dcf
1556bea
3ba2dcf
 
1556bea
3ba2dcf
 
ed8536b
075d4e3
ed8536b
 
1556bea
b0357a6
3ba2dcf
ed8536b
 
 
 
 
c4eb94d
6022fe1
ed8536b
3ba2dcf
ed8536b
3ba2dcf
ed8536b
3ba2dcf
ed8536b
3ba2dcf
ed8536b
3ba2dcf
c79d50f
1e7346f
ed8536b
3ba2dcf
ed8536b
3ba2dcf
 
 
 
 
 
 
6022fe1
e782b03
1b846eb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
from fastapi import FastAPI, Request, HTTPException
from fastapi.responses import PlainTextResponse
from fastapi.middleware.cors import CORSMiddleware
from twilio.twiml.messaging_response import MessagingResponse
import os,openai
import google.generativeai as genai
from llama_index.llms import OpenAI
from llama_index import VectorStoreIndex, SimpleDirectoryReader
from llama_index.llms import OpenAI
from llama_index import StorageContext, load_index_from_storage

# Gemini API key comes from the environment; fail fast at import time if it
# is missing rather than on the first request.
secret = os.environ["key"]

genai.configure(api_key=secret)
model = genai.GenerativeModel('gemini-1.5-flash')

import user_guide_sync

# Lazily-populated llama_index handles. The /whatsapp handler loads the
# persisted index on first use so process startup stays fast.
# (The original `global index,query_engine` statement was removed: `global`
# is a no-op at module scope.)
query_engine = index = None

# FastAPI application that receives Twilio webhooks.
app = FastAPI()

# NOTE(review): wildcard origins combined with allow_credentials=True is very
# permissive CORS — confirm this is intentional for a server-to-server webhook.
_cors_options = dict(
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
app.add_middleware(CORSMiddleware, **_cors_options)

@app.post("/update_user_guide_data")
async def update_user_guide_data():
    """Re-sync the user-guide documents and rebuild the vector index.

    Returns the plain string "guide updated" on success (unchanged contract).

    The sync routine is synchronous and potentially slow, so it is offloaded
    to a worker thread; calling it directly inside this ``async def`` would
    block the event loop and starve every other request while it runs.
    """
    import asyncio  # local import: only this endpoint needs it

    await asyncio.to_thread(user_guide_sync.update_user_guide)
    return "guide updated"



index = None

def _translate_to_english(text):
    """Translate *text* to English with gpt-4o and return the raw reply.

    The prompt asks for the translated sentence only, so the result can be
    fed straight into retrieval. (Prompt text preserved verbatim.)
    """
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": f"always translate to english(only): '{text}'   give only translated part only without any extra details"}
    ]
    completion = openai.chat.completions.create(
        model="gpt-4o",
        messages=messages
    )
    return completion.choices[0].message.content


def _get_index():
    """Return the persisted llama_index vector index, loading it on first use.

    Caches the index in the module-level ``index`` global so the slow storage
    load happens once per process. Uses the module-level StorageContext /
    load_index_from_storage imports (the per-call re-import in the original
    was redundant). NOTE(review): not thread-safe; two concurrent first
    requests may both load — duplicated work, but harmless.
    """
    global index
    if index is None:
        storage_context = StorageContext.from_defaults(persist_dir="llama_index")
        index = load_index_from_storage(storage_context=storage_context)
        print("Index loaded")
    else:
        print("Index already loaded")
    return index


def _retrieve_context(query):
    """Retrieve guide passages similar to *query*, joined into one string."""
    retriever = _get_index().as_retriever()
    similar_docs = retriever.retrieve(query)
    return "\n\n".join(doc.node.text for doc in similar_docs)


def _helpdesk_reply(user_query):
    """Generate the conversational 'contact support' fallback via gpt-4o-mini.

    The embedded prompt (including its leading whitespace and wording) is
    reproduced byte-for-byte from the original handler.
    """
    prompt = f"""
                         system:
                         you are parallax technologies chatbot design for answer the user question like a real human.
                         contact details suppor team link : https://projects.storemate.lk/customer  Email : [email protected] Youtube : https://www.youtube.com/channel/UCFkX9Fa-Qe6Qi4V5f0RcfSA Facebook : https://www.facebook.com/storemateinventory web link : https://storemate.lk
                         
                         only give single answer and don't give answer for general answers(this is CRM system for only pos system clients)
                         note : don't give any steps for solve the issues but give steps for sytem slow and performance related questions
                         user:{user_query}
                         """
    messages = [
        {"role": "system", "content": "you are parallax technologies chatbot design for answer the user question like a real human"},
        {"role": "user", "content": prompt}
    ]
    completion = openai.chat.completions.create(
        model="gpt-4o-mini",
        messages=messages,
        temperature=0,
    )
    return completion.choices[0].message.content


@app.post("/whatsapp")
async def reply_whatsapp(request: Request):
    """Twilio WhatsApp webhook.

    Pipeline: translate the incoming message to English, retrieve matching
    user-guide passages from the persisted llama_index store, answer from
    that context with gpt-4o-mini, and fall back to a human-style help-desk
    reply when the guide has no answer. Always returns TwiML XML — even on
    error — so Twilio receives a valid response.

    NOTE(review): the OpenAI/retrieval calls below are synchronous inside an
    ``async def`` and will block the event loop while they run — consider
    offloading to a thread.
    """
    form_data = await request.form()
    # The original also read NumMedia/From but never used them; dropped.
    user_query = form_data.get("Body")

    response = MessagingResponse()
    try:
        openai.api_key = os.environ["OPENAI_API_KEY"]

        user_query = _translate_to_english(user_query)
        print(f"translated text : {user_query}")

        context = _retrieve_context(user_query)

        # Grounded-answer prompt (text preserved verbatim, typos included,
        # since the exact wording is part of the model contract).
        prompt = f"""
        context : {context}
        
        user query : {user_query}
        
                Instructions:
                - First, understand the user question carefully.
                - If you find the correct answer from the provided data, respond with detailed steps (1, 2, ...) and always include a more details link.
                - If the correct answer is not found in the provided data or proide the correct solution to user using data then output is only this : "contact our help desk". dont add extra anything
         """
        messages = [
            {"role": "user", "content": prompt}
        ]
        gptresponse = openai.chat.completions.create(
            model="gpt-4o-mini",
            messages=messages
        )
        gpt_response = gptresponse.choices[0].message.content

        lowered = str(gpt_response).lower()
        print(lowered)
        # The grounded prompt is instructed to emit exactly "contact our help
        # desk" when it cannot answer; detect that marker and switch to the
        # conversational fallback.
        if "contact our help desk" in lowered or "our help desk" in lowered:
            print("help desk option")
            response.message(str(_helpdesk_reply(user_query)))
            return PlainTextResponse(str(response), media_type="application/xml")

        response.message(str(gpt_response))
        return PlainTextResponse(str(response), media_type="application/xml")
    except Exception as e:
        # Never propagate: Twilio needs valid TwiML back. Log and ask the
        # user to retry.
        print(str(e))
        response.message("please ask again...!")
        return PlainTextResponse(str(response), media_type="application/xml")
    

# Run the application (Make sure you have the necessary setup to run FastAPI)