# This file was autogenerated by uv via the following command:
#    uv pip compile pyproject.toml -o requirements.txt
aiofiles==23.2.1
    # via gradio
aiohappyeyeballs==2.4.4
    # via aiohttp
aiohttp==3.11.11
    # via
    #   datasets
    #   fsspec
    #   langchain
    #   llama-index-core
aiosignal==1.3.2
    # via aiohttp
annotated-types==0.7.0
    # via pydantic
anyio==4.8.0
    # via
    #   gradio
    #   httpx
    #   openai
    #   starlette
apscheduler==3.11.0
    # via daily-papers (pyproject.toml)
async-timeout==4.0.3
    # via
    #   aiohttp
    #   langchain
attrs==24.3.0
    # via aiohttp
beautifulsoup4==4.12.3
    # via llama-index-readers-file
bitarray==3.0.0
    # via colbert-ai
blinker==1.9.0
    # via flask
catalogue==2.0.10
    # via srsly
certifi==2024.12.14
    # via
    #   httpcore
    #   httpx
    #   llama-cloud
    #   requests
charset-normalizer==3.4.1
    # via requests
click==8.1.8
    # via
    #   flask
    #   llama-parse
    #   nltk
    #   typer
    #   uvicorn
colbert-ai==0.2.19
    # via ragatouille
dataclasses-json==0.6.7
    # via llama-index-core
datasets==3.2.0
    # via
    #   daily-papers (pyproject.toml)
    #   colbert-ai
deprecated==1.2.15
    # via llama-index-core
dill==0.3.8
    # via
    #   datasets
    #   multiprocess
dirtyjson==1.0.8
    # via llama-index-core
distro==1.9.0
    # via openai
exceptiongroup==1.2.2
    # via anyio
faiss-cpu==1.9.0.post1
    # via ragatouille
fast-pytorch-kmeans==0.2.0.1
    # via ragatouille
fastapi==0.115.6
    # via gradio
ffmpy==0.5.0
    # via gradio
filelock==3.16.1
    # via
    #   datasets
    #   huggingface-hub
    #   torch
    #   transformers
    #   triton
filetype==1.2.0
    # via llama-index-core
flask==3.1.0
    # via colbert-ai
frozenlist==1.5.0
    # via
    #   aiohttp
    #   aiosignal
fsspec==2024.9.0
    # via
    #   datasets
    #   gradio-client
    #   huggingface-hub
    #   llama-index-core
    #   torch
git-python==1.0.3
    # via colbert-ai
gitdb==4.0.12
    # via gitpython
gitpython==3.1.44
    # via git-python
gradio==5.12.0
    # via
    #   daily-papers (pyproject.toml)
    #   gradio-calendar
gradio-calendar==0.0.6
    # via daily-papers (pyproject.toml)
gradio-client==1.5.4
    # via gradio
greenlet==3.1.1
    # via sqlalchemy
h11==0.14.0
    # via
    #   httpcore
    #   uvicorn
hf-transfer==0.1.9
    # via daily-papers (pyproject.toml)
httpcore==1.0.7
    # via httpx
httpx==0.28.1
    # via
    #   gradio
    #   gradio-client
    #   langsmith
    #   llama-cloud
    #   llama-index-core
    #   openai
    #   safehttpx
huggingface-hub==0.27.1
    # via
    #   datasets
    #   gradio
    #   gradio-client
    #   sentence-transformers
    #   tokenizers
    #   transformers
idna==3.10
    # via
    #   anyio
    #   httpx
    #   requests
    #   yarl
itsdangerous==2.2.0
    # via flask
jinja2==3.1.5
    # via
    #   flask
    #   gradio
    #   torch
jiter==0.8.2
    # via openai
joblib==1.4.2
    # via
    #   nltk
    #   scikit-learn
jsonpatch==1.33
    # via langchain-core
jsonpointer==3.0.0
    # via jsonpatch
langchain==0.3.14
    # via ragatouille
langchain-core==0.3.29
    # via
    #   langchain
    #   langchain-text-splitters
    #   ragatouille
langchain-text-splitters==0.3.5
    # via langchain
langsmith==0.2.10
    # via
    #   langchain
    #   langchain-core
llama-cloud==0.1.8
    # via llama-index-indices-managed-llama-cloud
llama-index==0.12.10
    # via ragatouille
llama-index-agent-openai==0.4.1
    # via
    #   llama-index
    #   llama-index-program-openai
llama-index-cli==0.4.0
    # via llama-index
llama-index-core==0.12.10.post1
    # via
    #   llama-index
    #   llama-index-agent-openai
    #   llama-index-cli
    #   llama-index-embeddings-openai
    #   llama-index-indices-managed-llama-cloud
    #   llama-index-llms-openai
    #   llama-index-multi-modal-llms-openai
    #   llama-index-program-openai
    #   llama-index-question-gen-openai
    #   llama-index-readers-file
    #   llama-index-readers-llama-parse
    #   llama-parse
llama-index-embeddings-openai==0.3.1
    # via
    #   llama-index
    #   llama-index-cli
llama-index-indices-managed-llama-cloud==0.6.3
    # via llama-index
llama-index-llms-openai==0.3.13
    # via
    #   llama-index
    #   llama-index-agent-openai
    #   llama-index-cli
    #   llama-index-multi-modal-llms-openai
    #   llama-index-program-openai
    #   llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.4.2
    # via llama-index
llama-index-program-openai==0.3.1
    # via
    #   llama-index
    #   llama-index-question-gen-openai
llama-index-question-gen-openai==0.3.0
    # via llama-index
llama-index-readers-file==0.4.3
    # via llama-index
llama-index-readers-llama-parse==0.4.0
    # via llama-index
llama-parse==0.5.19
    # via llama-index-readers-llama-parse
markdown-it-py==3.0.0
    # via rich
markupsafe==2.1.5
    # via
    #   gradio
    #   jinja2
    #   werkzeug
marshmallow==3.25.0
    # via dataclasses-json
mdurl==0.1.2
    # via markdown-it-py
mpmath==1.3.0
    # via sympy
multidict==6.1.0
    # via
    #   aiohttp
    #   yarl
multiprocess==0.70.16
    # via datasets
mypy-extensions==1.0.0
    # via typing-inspect
nest-asyncio==1.6.0
    # via llama-index-core
networkx==3.4.2
    # via
    #   llama-index-core
    #   torch
ninja==1.11.1.3
    # via colbert-ai
nltk==3.9.1
    # via
    #   llama-index
    #   llama-index-core
numpy==1.26.4
    # via
    #   datasets
    #   faiss-cpu
    #   fast-pytorch-kmeans
    #   gradio
    #   langchain
    #   llama-index-core
    #   onnx
    #   pandas
    #   scikit-learn
    #   scipy
    #   sentence-transformers
    #   transformers
    #   voyager
nvidia-cublas-cu12==12.4.5.8
    # via
    #   nvidia-cudnn-cu12
    #   nvidia-cusolver-cu12
    #   torch
nvidia-cuda-cupti-cu12==12.4.127
    # via torch
nvidia-cuda-nvrtc-cu12==12.4.127
    # via torch
nvidia-cuda-runtime-cu12==12.4.127
    # via torch
nvidia-cudnn-cu12==9.1.0.70
    # via torch
nvidia-cufft-cu12==11.2.1.3
    # via torch
nvidia-curand-cu12==10.3.5.147
    # via torch
nvidia-cusolver-cu12==11.6.1.9
    # via torch
nvidia-cusparse-cu12==12.3.1.170
    # via
    #   nvidia-cusolver-cu12
    #   torch
nvidia-ml-py==12.560.30
    # via pynvml
nvidia-nccl-cu12==2.21.5
    # via torch
nvidia-nvjitlink-cu12==12.4.127
    # via
    #   nvidia-cusolver-cu12
    #   nvidia-cusparse-cu12
    #   torch
nvidia-nvtx-cu12==12.4.127
    # via torch
onnx==1.17.0
    # via ragatouille
openai==1.59.6
    # via
    #   llama-index-agent-openai
    #   llama-index-embeddings-openai
    #   llama-index-llms-openai
orjson==3.10.14
    # via
    #   gradio
    #   langsmith
packaging==24.2
    # via
    #   datasets
    #   faiss-cpu
    #   gradio
    #   gradio-client
    #   huggingface-hub
    #   langchain-core
    #   marshmallow
    #   transformers
pandas==2.2.3
    # via
    #   daily-papers (pyproject.toml)
    #   datasets
    #   gradio
    #   llama-index-readers-file
pillow==11.1.0
    # via
    #   gradio
    #   llama-index-core
    #   sentence-transformers
propcache==0.2.1
    # via
    #   aiohttp
    #   yarl
protobuf==5.29.3
    # via onnx
pyarrow==18.1.0
    # via datasets
pydantic==2.10.5
    # via
    #   fastapi
    #   gradio
    #   langchain
    #   langchain-core
    #   langsmith
    #   llama-cloud
    #   llama-index-core
    #   llama-parse
    #   openai
pydantic-core==2.27.2
    # via pydantic
pydub==0.25.1
    # via gradio
pygments==2.19.1
    # via rich
pynvml==12.0.0
    # via fast-pytorch-kmeans
pypdf==5.1.0
    # via llama-index-readers-file
python-dateutil==2.9.0.post0
    # via pandas
python-dotenv==1.0.1
    # via colbert-ai
python-multipart==0.0.20
    # via gradio
pytz==2024.2
    # via pandas
pyyaml==6.0.2
    # via
    #   datasets
    #   gradio
    #   huggingface-hub
    #   langchain
    #   langchain-core
    #   llama-index-core
    #   transformers
ragatouille==0.0.8.post4
    # via daily-papers (pyproject.toml)
regex==2024.11.6
    # via
    #   nltk
    #   tiktoken
    #   transformers
requests==2.32.3
    # via
    #   datasets
    #   huggingface-hub
    #   langchain
    #   langsmith
    #   llama-index-core
    #   requests-toolbelt
    #   tiktoken
    #   transformers
requests-toolbelt==1.0.0
    # via langsmith
rich==13.9.4
    # via typer
ruff==0.9.1
    # via gradio
safehttpx==0.1.6
    # via gradio
safetensors==0.5.2
    # via transformers
scikit-learn==1.6.1
    # via sentence-transformers
scipy==1.15.1
    # via
    #   colbert-ai
    #   scikit-learn
    #   sentence-transformers
semantic-version==2.10.0
    # via gradio
sentence-transformers==2.7.0
    # via ragatouille
setuptools==75.8.0
    # via daily-papers (pyproject.toml)
shellingham==1.5.4
    # via typer
six==1.17.0
    # via python-dateutil
smmap==5.0.2
    # via gitdb
sniffio==1.3.1
    # via
    #   anyio
    #   openai
soupsieve==2.6
    # via beautifulsoup4
sqlalchemy==2.0.37
    # via
    #   langchain
    #   llama-index-core
srsly==2.4.8
    # via ragatouille
starlette==0.41.3
    # via
    #   fastapi
    #   gradio
striprtf==0.0.26
    # via llama-index-readers-file
sympy==1.13.1
    # via torch
tenacity==9.0.0
    # via
    #   langchain
    #   langchain-core
    #   llama-index-core
threadpoolctl==3.5.0
    # via scikit-learn
tiktoken==0.8.0
    # via llama-index-core
tokenizers==0.21.0
    # via transformers
tomlkit==0.13.2
    # via gradio
torch==2.5.1
    # via
    #   fast-pytorch-kmeans
    #   ragatouille
    #   sentence-transformers
tqdm==4.67.1
    # via
    #   colbert-ai
    #   datasets
    #   huggingface-hub
    #   llama-index-core
    #   nltk
    #   openai
    #   sentence-transformers
    #   transformers
transformers==4.48.0
    # via
    #   colbert-ai
    #   ragatouille
    #   sentence-transformers
triton==3.1.0
    # via torch
typer==0.15.1
    # via gradio
typing-extensions==4.12.2
    # via
    #   anyio
    #   fastapi
    #   gradio
    #   gradio-client
    #   huggingface-hub
    #   langchain-core
    #   llama-index-core
    #   multidict
    #   openai
    #   pydantic
    #   pydantic-core
    #   pypdf
    #   rich
    #   sqlalchemy
    #   torch
    #   typer
    #   typing-inspect
    #   uvicorn
typing-inspect==0.9.0
    # via
    #   dataclasses-json
    #   llama-index-core
tzdata==2024.2
    # via pandas
tzlocal==5.2
    # via apscheduler
ujson==5.10.0
    # via colbert-ai
urllib3==2.3.0
    # via requests
uvicorn==0.34.0
    # via gradio
voyager==2.1.0
    # via ragatouille
websockets==14.1
    # via gradio-client
werkzeug==3.1.3
    # via flask
wrapt==1.17.1
    # via
    #   deprecated
    #   llama-index-core
xxhash==3.5.0
    # via datasets
yarl==1.18.3
    # via aiohttp