{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": [],
"authorship_tag": "ABX9TyM7DVBQbBv7iSjrA/U71HaV",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "JboB5VaCJUrb",
"outputId": "2433bc46-9d7f-476e-bfe9-0e4be5f4e51a"
},
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m12.5/12.5 MB\u001b[0m \u001b[31m24.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25h"
]
}
],
"source": [
"!pip install -q llama-index==0.10.5 openai==1.12.0 tiktoken==0.6.0 llama-index-tools-google==0.1.3"
]
},
{
"cell_type": "code",
"source": [
"import os\n",
"\n",
"# Set the \"OPENAI_API_KEY\" in the Python environment. Will be used by OpenAI client later.\n",
"os.environ[\"OPENAI_API_KEY\"] = \"\""
],
"metadata": {
"id": "1NKAn5scN_g9"
},
"execution_count": 5,
"outputs": []
},
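{
"cell_type": "markdown",
"source": [
"As an alternative to hardcoding the key in the notebook, you can prompt for it at runtime. The cell below is a minimal sketch using the standard-library `getpass`; it only asks for the key if the variable was left empty above."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"import os\n",
"from getpass import getpass\n",
"\n",
"# Prompt for the key only if it was not set in the previous cell.\n",
"if not os.environ.get(\"OPENAI_API_KEY\"):\n",
"    os.environ[\"OPENAI_API_KEY\"] = getpass(\"OpenAI API key: \")"
],
"metadata": {},
"execution_count": null,
"outputs": []
},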
{
"cell_type": "markdown",
"source": [
"# Define Google Search Tool"
],
"metadata": {
"id": "0LMypoqUyuXq"
}
},
{
"cell_type": "code",
"source": [
"from llama_index.tools.google import GoogleSearchToolSpec\n",
"\n",
"tool_spec = GoogleSearchToolSpec(key=\"[GOOGLE_API_KEY]\", engine=\"[GOOGLE_ENGINE_ID]\")"
],
"metadata": {
"id": "4Q7sc69nJvWI"
},
"execution_count": 54,
"outputs": []
},
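{
"cell_type": "markdown",
"source": [
"Before wrapping the spec, it can help to confirm which functions it exposes as tools. The cell below is just a quick inspection sketch; judging by the `res.sources` output later in the notebook, the spec should expose a `google_search` tool."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# Print the name and description of each tool exposed by the spec.\n",
"for tool in tool_spec.to_tool_list():\n",
"    print(tool.metadata.name)\n",
"    print(tool.metadata.description)"
],
"metadata": {},
"execution_count": null,
"outputs": []
},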
{
"cell_type": "code",
"source": [
"# Import and initialize our tool spec\n",
"from llama_index.core.tools.tool_spec.load_and_search import LoadAndSearchToolSpec\n",
"\n",
"# Wrap the google search tool to create an index on top of the returned Google search\n",
"wrapped_tool = LoadAndSearchToolSpec.from_defaults(\n",
" tool_spec.to_tool_list()[0],\n",
").to_tool_list()"
],
"metadata": {
"id": "VrbuIOaMeOIf"
},
"execution_count": 69,
"outputs": []
},
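{
"cell_type": "markdown",
"source": [
"`LoadAndSearchToolSpec` replaces the single search tool with two tools: one that loads the search results into an index, and one that reads (queries) that index. A quick check (the same `metadata` inspection as above) makes the split visible; the `res.sources` output later in the notebook shows the two names as `google_search` and `read_google_search`."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# The wrapper yields a loader tool plus a reader tool over the loaded index.\n",
"for tool in wrapped_tool:\n",
"    print(tool.metadata.name)"
],
"metadata": {},
"execution_count": null,
"outputs": []
},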
{
"cell_type": "markdown",
"source": [
"# Create the Agent"
],
"metadata": {
"id": "T3ENpLyBy7UL"
}
},
{
"cell_type": "code",
"source": [
"from llama_index.agent.openai import OpenAIAgent\n",
"\n",
"agent = OpenAIAgent.from_tools(wrapped_tool, verbose=False)"
],
"metadata": {
"id": "-_Ab47ppK8b2"
},
"execution_count": 70,
"outputs": []
},
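{
"cell_type": "markdown",
"source": [
"`OpenAIAgent.from_tools` falls back to llama-index's default OpenAI model when no LLM is passed. If you want to pin the model explicitly (and watch the tool calls), you can construct the LLM yourself, as sketched below; the model name is only an illustration, not something this notebook depends on."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"from llama_index.llms.openai import OpenAI\n",
"\n",
"# Optional: an agent with an explicitly chosen model and verbose tool logging.\n",
"llm = OpenAI(model=\"gpt-3.5-turbo\", temperature=0)\n",
"verbose_agent = OpenAIAgent.from_tools(wrapped_tool, llm=llm, verbose=True)"
],
"metadata": {},
"execution_count": null,
"outputs": []
},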
{
"cell_type": "code",
"source": [
"res = agent.chat(\"How many parameters LLaMA2 model has?\")"
],
"metadata": {
"id": "YcUyz1-FlCQ8"
},
"execution_count": 71,
"outputs": []
},
{
"cell_type": "code",
"source": [
"res.response"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 35
},
"id": "w4wK5sY-lOOv",
"outputId": "8090a106-6fac-4514-fdbd-c72a01b28169"
},
"execution_count": 72,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"'The LLaMA2 model has parameters available in three different sizes: 7 billion, 13 billion, and 70 billion.'"
],
"application/vnd.google.colaboratory.intrinsic+json": {
"type": "string"
}
},
"metadata": {},
"execution_count": 72
}
]
},
{
"cell_type": "code",
"source": [
"res.sources"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "TM_cvBA1nTJM",
"outputId": "0bf3533a-c62d-4d0d-bd76-76c043477042"
},
"execution_count": 73,
"outputs": [
{
"output_type": "execute_result",
"data": {
"text/plain": [
"[ToolOutput(content='Content loaded! You can now search the information using read_google_search', tool_name='google_search', raw_input={'args': (), 'kwargs': {'query': 'parameters of LLaMA2 model'}}, raw_output='Content loaded! You can now search the information using read_google_search', is_error=False),\n",
" ToolOutput(content='Answer: The parameters of the LLaMA2 model are available in three different sizes: 7 billion, 13 billion, and 70 billion.', tool_name='read_google_search', raw_input={'args': (), 'kwargs': {'query': 'parameters of LLaMA2 model'}}, raw_output='Answer: The parameters of the LLaMA2 model are available in three different sizes: 7 billion, 13 billion, and 70 billion.', is_error=False)]"
]
},
"metadata": {},
"execution_count": 73
}
]
},
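{
"cell_type": "markdown",
"source": [
"The agent keeps the conversation in memory, so follow-up questions can refer back to the previous answer. The question below is only an example; its output is not recorded here."
],
"metadata": {}
},
{
"cell_type": "code",
"source": [
"# A follow-up that relies on the previous answer staying in the agent's chat history.\n",
"followup = agent.chat(\"Which of those model sizes is the largest?\")\n",
"print(followup.response)"
],
"metadata": {},
"execution_count": null,
"outputs": []
},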
{
"cell_type": "code",
"source": [],
"metadata": {
"id": "SPUgKiKpygLn"
},
"execution_count": null,
"outputs": []
}
]
}