import asyncio

from llama_index.core import (
    Settings,
    SimpleDirectoryReader,
    SummaryIndex,
    VectorStoreIndex,
)
from llama_index.core.agent import ReActAgent
from llama_index.core.node_parser import SentenceSplitter
from llama_index.core.objects import ObjectIndex
from llama_index.core.tools import QueryEngineTool, ToolMetadata
from llama_index.embeddings.ollama import OllamaEmbedding
from llama_index.llms.ollama import Ollama
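
# Configure the local Ollama LLM and embedding model as the process-wide defaults.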
llm = Ollama(model="llama3")
# OllamaEmbedding comes from the llama-index-embeddings-ollama package.
embed_model = OllamaEmbedding(model_name="llama3")
Settings.llm = llm
Settings.embed_model = embed_model
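
# Load the Alice in Wonderland PDF, split it into sentence-level nodes, and
# build a vector index (semantic lookup) plus a summary index (broad questions).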
documents = SimpleDirectoryReader(input_files=["./alice.pdf"]).load_data()

node_parser = SentenceSplitter()
nodes = node_parser.get_nodes_from_documents(documents)
vector_index = VectorStoreIndex(nodes=nodes, show_progress=True)
summary_index = SummaryIndex(nodes=nodes, show_progress=True)

vector_query_engine = vector_index.as_query_engine()
summary_query_engine = summary_index.as_query_engine()
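
# Repeat the same ingestion and indexing pipeline for the ReAct paper.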
react_documents = SimpleDirectoryReader(input_files=["./ReAct.pdf"]).load_data()

react_nodes = node_parser.get_nodes_from_documents(react_documents)
react_vector_index = VectorStoreIndex(nodes=react_nodes, show_progress=True)
react_summary_index = SummaryIndex(nodes=react_nodes, show_progress=True)

react_vector_query_engine = react_vector_index.as_query_engine()
react_summary_query_engine = react_summary_index.as_query_engine()
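
# Wrap the Alice query engines as tools and hand them to a document-level ReAct agent.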
vector_tool = QueryEngineTool(
    query_engine=vector_query_engine,
    metadata=ToolMetadata(
        name="vector_tool",
        description="Useful tool to get info about Alice in Wonderland via vector index search",
    ),
)
summary_tool = QueryEngineTool(
    query_engine=summary_query_engine,
    metadata=ToolMetadata(
        name="summary_tool",
        description="Useful tool to get info about Alice in Wonderland via summary index search",
    ),
)

alice_agent = ReActAgent.from_tools(
    tools=[vector_tool, summary_tool],
    llm=llm,
    verbose=True,
)
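
# Same pattern for the ReAct paper: two query-engine tools behind one agent.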
react_vector_tool = QueryEngineTool(
    query_engine=react_vector_query_engine,
    metadata=ToolMetadata(
        name="react_vector_tool",
        description="Useful tool to get info about the ReAct paper via vector index search",
    ),
)
react_summary_tool = QueryEngineTool(
    query_engine=react_summary_query_engine,
    metadata=ToolMetadata(
        name="react_summary_tool",
        description="Useful tool to get info about the ReAct paper via summary index search",
    ),
)
react_agent = ReActAgent.from_tools(
    tools=[react_vector_tool, react_summary_tool],
    llm=llm,
    verbose=True,
)
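
# Expose each document agent itself as a query-engine tool for the top-level agent.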
alice_doc_tool = QueryEngineTool(
    query_engine=alice_agent,
    metadata=ToolMetadata(
        name="alice_doc_tool",
        description="Useful tool to answer questions related to Alice in Wonderland",
    ),
)
react_doc_tool = QueryEngineTool(
    query_engine=react_agent,
    metadata=ToolMetadata(
        name="react_doc_tool",
        description="Useful tool to answer questions related to the ReAct paper",
    ),
)

agent_tools = [alice_doc_tool, react_doc_tool]
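
# Index the document tools so the top agent retrieves the most relevant tool per query.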
obj_index = ObjectIndex.from_objects(
    agent_tools,
    index_cls=VectorStoreIndex,
)
top_agent = ReActAgent.from_tools(
    tool_retriever=obj_index.as_retriever(similarity_top_k=1),
    verbose=True,
)
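
# Ask the top-level agent a question; it should route to the ReAct paper agent.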
# achat returns the complete response once the coroutine finishes.
response = asyncio.run(top_agent.achat("Why do reasoning and acting work well together?"))
print(response)