# Source: aie4-final / requirements.txt (Hugging Face Space, commit 8ce28a9)
# Author: pattonma — "explicit reqs"
# NOTE: the lines above were scraped web-UI chrome ("raw / history blame / 9.56 kB");
# they have been commented out so this file parses as a valid requirements file.
#
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile requirements.in
#
aiofiles==23.2.1
# via chainlit
aiohappyeyeballs==2.4.3
# via aiohttp
aiohttp==3.10.10
# via
# langchain
# langchain-community
aiosignal==1.3.1
# via aiohttp
annotated-types==0.7.0
# via pydantic
anthropic==0.36.0
# via langchain-anthropic
anyio==4.6.0
# via
# anthropic
# asyncer
# httpx
# openai
# starlette
# watchfiles
async-timeout==4.0.3
# via
# aiohttp
# langchain
asyncer==0.0.7
# via chainlit
attrs==24.2.0
# via aiohttp
bidict==0.23.1
# via python-socketio
certifi==2024.8.30
# via
# httpcore
# httpx
# requests
chainlit==1.2.0
# via -r requirements.in
charset-normalizer==3.4.0
# via requests
chevron==0.14.0
# via literalai
click==8.1.7
# via
# chainlit
# uvicorn
dataclasses-json==0.6.7
# via
# chainlit
# langchain-community
defusedxml==0.7.1
# via langchain-anthropic
deprecated==1.2.14
# via
# opentelemetry-api
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
# opentelemetry-semantic-conventions
distro==1.9.0
# via
# anthropic
# openai
exceptiongroup==1.2.2
# via anyio
fastapi==0.112.4
# via chainlit
filelock==3.16.1
# via
# huggingface-hub
# torch
# transformers
# triton
filetype==1.2.0
# via chainlit
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
fsspec==2024.9.0
# via
# huggingface-hub
# torch
googleapis-common-protos==1.65.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
greenlet==3.1.1
# via sqlalchemy
grpcio==1.66.2
# via
# grpcio-tools
# opentelemetry-exporter-otlp-proto-grpc
# qdrant-client
grpcio-tools==1.62.3
# via qdrant-client
h11==0.14.0
# via
# httpcore
# uvicorn
# wsproto
h2==4.1.0
# via httpx
hpack==4.0.0
# via h2
httpcore==1.0.6
# via httpx
httpx[http2]==0.27.2
# via
# anthropic
# chainlit
# langsmith
# literalai
# openai
# qdrant-client
huggingface-hub==0.25.2
# via
# langchain-huggingface
# sentence-transformers
# tokenizers
# transformers
hyperframe==6.0.1
# via h2
idna==3.10
# via
# anyio
# httpx
# requests
# yarl
importlib-metadata==8.4.0
# via opentelemetry-api
jinja2==3.1.4
# via torch
jiter==0.6.1
# via
# anthropic
# openai
joblib==1.4.2
# via scikit-learn
jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
# via jsonpatch
langchain==0.3.3
# via
# -r requirements.in
# langchain-community
langchain-anthropic==0.2.3
# via -r requirements.in
langchain-community==0.3.2
# via
# -r requirements.in
# langchain-experimental
langchain-core==0.3.10
# via
# langchain
# langchain-anthropic
# langchain-community
# langchain-experimental
# langchain-huggingface
# langchain-openai
# langchain-qdrant
# langchain-text-splitters
langchain-experimental==0.3.2
# via -r requirements.in
langchain-huggingface==0.1.0
# via -r requirements.in
langchain-openai==0.2.2
# via -r requirements.in
langchain-qdrant==0.1.4
# via -r requirements.in
langchain-text-splitters==0.3.0
# via langchain
langsmith==0.1.134
# via
# langchain
# langchain-community
# langchain-core
lazify==0.4.0
# via chainlit
literalai==0.0.607
# via chainlit
markupsafe==3.0.1
# via jinja2
marshmallow==3.22.0
# via dataclasses-json
mpmath==1.3.0
# via sympy
multidict==6.1.0
# via
# aiohttp
# yarl
mypy-extensions==1.0.0
# via typing-inspect
nest-asyncio==1.6.0
# via chainlit
networkx==3.4
# via torch
numpy==1.26.4
# via
# chainlit
# langchain
# langchain-community
# qdrant-client
# scikit-learn
# scipy
# transformers
nvidia-cublas-cu12==12.1.3.1
# via
# nvidia-cudnn-cu12
# nvidia-cusolver-cu12
# torch
nvidia-cuda-cupti-cu12==12.1.105
# via torch
nvidia-cuda-nvrtc-cu12==12.1.105
# via torch
nvidia-cuda-runtime-cu12==12.1.105
# via torch
nvidia-cudnn-cu12==9.1.0.70
# via torch
nvidia-cufft-cu12==11.0.2.54
# via torch
nvidia-curand-cu12==10.3.2.106
# via torch
nvidia-cusolver-cu12==11.4.5.107
# via torch
nvidia-cusparse-cu12==12.1.0.106
# via
# nvidia-cusolver-cu12
# torch
nvidia-nccl-cu12==2.20.5
# via torch
nvidia-nvjitlink-cu12==12.6.77
# via
# nvidia-cusolver-cu12
# nvidia-cusparse-cu12
nvidia-nvtx-cu12==12.1.105
# via torch
openai==1.51.2
# via langchain-openai
opentelemetry-api==1.27.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
# opentelemetry-instrumentation
# opentelemetry-sdk
# opentelemetry-semantic-conventions
# uptrace
opentelemetry-exporter-otlp==1.27.0
# via uptrace
opentelemetry-exporter-otlp-proto-common==1.27.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-exporter-otlp-proto-grpc==1.27.0
# via opentelemetry-exporter-otlp
opentelemetry-exporter-otlp-proto-http==1.27.0
# via opentelemetry-exporter-otlp
opentelemetry-instrumentation==0.48b0
# via uptrace
opentelemetry-proto==1.27.0
# via
# opentelemetry-exporter-otlp-proto-common
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
opentelemetry-sdk==1.27.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-exporter-otlp-proto-http
# uptrace
opentelemetry-semantic-conventions==0.48b0
# via opentelemetry-sdk
orjson==3.10.7
# via langsmith
packaging==23.2
# via
# chainlit
# huggingface-hub
# langchain-core
# literalai
# marshmallow
# transformers
pillow==10.4.0
# via sentence-transformers
portalocker==2.10.1
# via qdrant-client
propcache==0.2.0
# via yarl
protobuf==4.25.5
# via
# googleapis-common-protos
# grpcio-tools
# opentelemetry-proto
pydantic==2.9.2
# via
# anthropic
# chainlit
# fastapi
# langchain
# langchain-anthropic
# langchain-core
# langchain-qdrant
# langsmith
# literalai
# openai
# pydantic-settings
# qdrant-client
pydantic-core==2.23.4
# via pydantic
pydantic-settings==2.5.2
# via langchain-community
pyjwt==2.9.0
# via chainlit
python-dotenv==1.0.1
# via
# chainlit
# pydantic-settings
python-engineio==4.9.1
# via python-socketio
python-multipart==0.0.9
# via chainlit
python-socketio==5.11.4
# via chainlit
pyyaml==6.0.2
# via
# huggingface-hub
# langchain
# langchain-community
# langchain-core
# transformers
qdrant-client==1.12.0
# via
# -r requirements.in
# langchain-qdrant
regex==2024.9.11
# via
# tiktoken
# transformers
requests==2.32.3
# via
# huggingface-hub
# langchain
# langchain-community
# langsmith
# opentelemetry-exporter-otlp-proto-http
# requests-toolbelt
# tiktoken
# transformers
requests-toolbelt==1.0.0
# via langsmith
safetensors==0.4.5
# via transformers
scikit-learn==1.5.2
# via sentence-transformers
scipy==1.14.1
# via
# scikit-learn
# sentence-transformers
sentence-transformers==3.2.0
# via langchain-huggingface
simple-websocket==1.1.0
# via python-engineio
sniffio==1.3.1
# via
# anthropic
# anyio
# httpx
# openai
sqlalchemy==2.0.35
# via
# langchain
# langchain-community
starlette==0.37.2
# via
# chainlit
# fastapi
sympy==1.13.3
# via torch
syncer==2.0.3
# via chainlit
tenacity==8.5.0
# via
# langchain
# langchain-community
# langchain-core
threadpoolctl==3.5.0
# via scikit-learn
tiktoken==0.8.0
# via langchain-openai
tokenizers==0.20.1
# via
# anthropic
# langchain-huggingface
# transformers
tomli==2.0.2
# via chainlit
torch==2.4.1
# via sentence-transformers
tqdm==4.66.5
# via
# huggingface-hub
# openai
# sentence-transformers
# transformers
transformers==4.45.2
# via
# langchain-huggingface
# sentence-transformers
triton==3.0.0
# via torch
typing-extensions==4.12.2
# via
# anthropic
# anyio
# fastapi
# huggingface-hub
# langchain-core
# multidict
# openai
# opentelemetry-sdk
# pydantic
# pydantic-core
# sqlalchemy
# torch
# typing-inspect
# uvicorn
typing-inspect==0.9.0
# via dataclasses-json
uptrace==1.27.0
# via chainlit
urllib3==2.2.3
# via
# qdrant-client
# requests
uvicorn==0.25.0
# via chainlit
watchfiles==0.20.0
# via chainlit
wrapt==1.16.0
# via
# deprecated
# opentelemetry-instrumentation
wsproto==1.2.0
# via simple-websocket
yarl==1.14.0
# via aiohttp
zipp==3.20.2
# via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
# setuptools