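# RAG customer-support assistant for Tkrupt: Gemini (via langchain_google_genai)
# provides the embeddings and chat model, Pinecone holds the indexed document
# chunks, and a retrieval chain answers user questions from that context.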
# --- One-time ingestion (commented out): load the source text and split it into chunks ---
# from langchain.document_loaders import TextLoader, DirectoryLoader
# from langchain.text_splitter import RecursiveCharacterTextSplitter
# from langchain_google_genai import GoogleGenerativeAIEmbeddings
# embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
# import os
# print(os.path.exists("Data/"))  # Check that the Data/ directory exists
# print(os.listdir("Data/"))
# loader = TextLoader("Data/tkrupt.txt")
# docs = loader.load()
# splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=100)
# chunks = splitter.split_documents(docs)
# print(len(chunks))
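# A sketch of how the chunks above could be written to Pinecone (assuming the
# 'customer-support' index already exists and PINECONE_API_KEY is set in the
# environment), so that from_existing_index below has something to retrieve:
# from langchain_pinecone import PineconeVectorStore
# PineconeVectorStore.from_documents(
#     documents=chunks,
#     embedding=embeddings,
#     index_name="customer-support",
# )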
import os
from dotenv import load_dotenv
from langchain_google_genai import ChatGoogleGenerativeAI, GoogleGenerativeAIEmbeddings
from langchain_pinecone import PineconeVectorStore

# Read GOOGLE_API_KEY (and PINECONE_API_KEY) from .env instead of hardcoding keys
load_dotenv()
google_api_key = os.getenv("GOOGLE_API_KEY")
embeddings = GoogleGenerativeAIEmbeddings(model="models/embedding-001", google_api_key=google_api_key)
llm = ChatGoogleGenerativeAI(
    model="gemini-1.5-pro",
    temperature=0.5,
    max_tokens=None,
    timeout=None,
    max_retries=2,
    api_key=google_api_key,
)
# PINECONE_API_KEY (loaded above from .env) is picked up from the environment by langchain_pinecone
doc_search = PineconeVectorStore.from_existing_index(
    index_name='customer-support',
    embedding=embeddings
)
# Similarity search over the index, returning the 3 closest chunks per query
retriever = doc_search.as_retriever(search_type='similarity', search_kwargs={'k': 3})
# print(retriever.invoke("What services do they provide?"))
from langchain.chains import create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate
system_prompt = (
    "You are a helpful assistant for Tkrupt, a company that delivers software solutions. "
    "Use the following context to answer the question. "
    "If you don't know the answer, just say that you don't know."
    "\n\n"
    "{context}"
)
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", system_prompt),
        ("human", "{input}"),
    ]
)
# "Stuff" the retrieved chunks into the prompt, then have the LLM answer
question_answer_chain = create_stuff_documents_chain(llm, prompt)
rag_chain = create_retrieval_chain(retriever, question_answer_chain)
print(rag_chain.invoke({'input': "what is supra GTA ?"})['answer'])
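# The chain's output also carries the retrieved chunks under 'context'; a quick
# way to inspect what grounded the answer (assuming the invoke above succeeds):
# result = rag_chain.invoke({'input': "What services do they provide?"})
# print(result['answer'])
# for doc in result['context']:
#     print(doc.page_content[:200])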