# This file was autogenerated by uv via the following command:
#    uv export --no-dev --no-hashes -o requirements.txt
aiofiles==25.1.0
    # via parlant
aiopenapi3==0.9.0
    # via parlant
aiorwlock==1.5.0
    # via parlant
altair==6.0.0
    # via streamlit
annotated-doc==0.0.4
    # via fastapi
annotated-types==0.7.0
    # via pydantic
anyio==4.12.1
    # via
    #   google-genai
    #   httpx
    #   mcp
    #   openai
    #   sse-starlette
    #   starlette
attrs==25.4.0
    # via
    #   cyclopts
    #   jsonschema
    #   referencing
authlib==1.6.6
    # via
    #   fastmcp
    #   parlant
backports-tarfile==1.2.0 ; python_full_version < '3.12'
    # via jaraco-context
beartype==0.22.9
    # via
    #   py-key-value-aio
    #   py-key-value-shared
blinker==1.9.0
    # via streamlit
boto3==1.42.43
    # via parlant
botocore==1.42.43
    # via
    #   boto3
    #   s3transfer
cachetools==6.2.6
    # via
    #   parlant
    #   py-key-value-aio
    #   streamlit
certifi==2026.1.4
    # via
    #   httpcore
    #   httpx
    #   requests
cffi==2.0.0 ; platform_python_implementation != 'PyPy'
    # via cryptography
chardet==5.2.0
    # via prance
charset-normalizer==3.4.4
    # via requests
click==8.3.1
    # via
    #   parlant
    #   streamlit
    #   typer-slim
    #   uvicorn
colorama==0.4.6
    # via
    #   click
    #   parlant
    #   tqdm
coloredlogs==15.0.1
    # via parlant
contextvars==2.4
    # via parlant
contourpy==1.3.3
    # via matplotlib
croniter==6.0.0
    # via parlant
cryptography==46.0.4
    # via
    #   authlib
    #   google-auth
    #   pyjwt
    #   secretstorage
cuda-bindings==12.9.4 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
cuda-pathfinder==1.3.3 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via cuda-bindings
cycler==0.12.1
    # via matplotlib
cyclopts==4.5.1
    # via fastmcp
deprecated==1.3.1
    # via limits
diskcache==5.6.3
    # via py-key-value-aio
distro==1.9.0
    # via
    #   google-genai
    #   openai
dnspython==2.8.0
    # via email-validator
docstring-parser==0.17.0
    # via cyclopts
docutils==0.22.4
    # via rich-rst
email-validator==2.3.0
    # via
    #   aiopenapi3
    #   pydantic
exceptiongroup==1.3.1
    # via fastmcp
fastapi==0.128.2
    # via parlant
fastmcp==2.13.0
    # via parlant
filelock==3.20.3
    # via
    #   huggingface-hub
    #   torch
fonttools==4.61.1
    # via matplotlib
fsspec==2026.2.0
    # via
    #   huggingface-hub
    #   torch
gitdb==4.0.12
    # via gitpython
gitpython==3.1.46
    # via streamlit
google-api-core==2.29.0
    # via parlant
google-auth==2.48.0
    # via
    #   google-api-core
    #   google-genai
google-genai==1.62.0
    # via
    #   parlant
    #   trialpath
googleapis-common-protos==1.72.0
    # via
    #   google-api-core
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
grpcio==1.78.0
    # via opentelemetry-exporter-otlp-proto-grpc
h11==0.16.0
    # via
    #   httpcore
    #   uvicorn
hf-xet==1.2.0 ; platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'
    # via huggingface-hub
httpcore==1.0.9
    # via httpx
httpx==0.28.1
    # via
    #   aiopenapi3
    #   fastmcp
    #   google-genai
    #   huggingface-hub
    #   mcp
    #   openai
    #   parlant
    #   parlant-client
    #   trialpath
httpx-sse==0.4.3
    # via mcp
huggingface-hub==1.4.1
    # via
    #   tokenizers
    #   trialpath
humanfriendly==10.0
    # via coloredlogs
idna==3.11
    # via
    #   anyio
    #   email-validator
    #   httpx
    #   requests
    #   yarl
immutables==0.21
    # via contextvars
importlib-metadata==8.7.1
    # via
    #   keyring
    #   opentelemetry-api
isodate==0.7.2
    # via openapi-core
jaraco-classes==3.4.0
    # via keyring
jaraco-context==6.1.0
    # via keyring
jaraco-functools==4.4.0
    # via keyring
jeepney==0.9.0 ; sys_platform == 'linux'
    # via
    #   keyring
    #   secretstorage
jinja2==3.1.6
    # via
    #   altair
    #   parlant
    #   pydeck
    #   torch
jiter==0.13.0
    # via openai
jmespath==1.1.0
    # via
    #   aiopenapi3
    #   boto3
    #   botocore
jsonfinder==0.4.2
    # via parlant
jsonschema==4.26.0
    # via
    #   altair
    #   mcp
    #   openapi-core
    #   openapi-schema-validator
    #   openapi-spec-validator
    #   parlant
jsonschema-path==0.3.4
    # via
    #   openapi-core
    #   openapi-spec-validator
jsonschema-specifications==2025.9.1
    # via
    #   jsonschema
    #   openapi-schema-validator
keyring==25.7.0
    # via py-key-value-aio
kiwisolver==1.4.9
    # via matplotlib
lagom==2.7.7
    # via parlant
lazy-object-proxy==1.12.0
    # via openapi-spec-validator
librt==0.7.8 ; platform_python_implementation != 'PyPy'
    # via mypy
limits==5.8.0
    # via parlant
markdown-it-py==4.0.0
    # via rich
markupsafe==3.0.3
    # via
    #   jinja2
    #   werkzeug
matplotlib==3.10.8
    # via networkx
mcp==1.26.0
    # via
    #   fastmcp
    #   parlant
mdurl==0.1.2
    # via markdown-it-py
more-itertools==10.8.0
    # via
    #   aiopenapi3
    #   jaraco-classes
    #   jaraco-functools
    #   openapi-core
    #   parlant
mpmath==1.3.0
    # via sympy
multidict==6.7.1
    # via yarl
mypy==1.19.1
    # via parlant-client
mypy-extensions==1.1.0
    # via mypy
nano-vectordb==0.0.4.3
    # via parlant
nanoid==2.0.0
    # via
    #   parlant
    #   parlant-client
narwhals==2.16.0
    # via altair
networkx==3.6.1
    # via
    #   parlant
    #   torch
numpy==2.4.2
    # via
    #   contourpy
    #   matplotlib
    #   nano-vectordb
    #   networkx
    #   pandas
    #   pydeck
    #   scipy
    #   streamlit
nvidia-cublas-cu12==12.8.4.1 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via
    #   nvidia-cudnn-cu12
    #   nvidia-cusolver-cu12
    #   torch
nvidia-cuda-cupti-cu12==12.8.90 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-cuda-nvrtc-cu12==12.8.93 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-cuda-runtime-cu12==12.8.90 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-cudnn-cu12==9.10.2.21 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-cufft-cu12==11.3.3.83 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-cufile-cu12==1.13.1.3 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-curand-cu12==10.3.9.90 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-cusolver-cu12==11.7.3.90 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-cusparse-cu12==12.5.8.93 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via
    #   nvidia-cusolver-cu12
    #   torch
nvidia-cusparselt-cu12==0.7.1 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-nccl-cu12==2.27.5 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-nvjitlink-cu12==12.8.93 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via
    #   nvidia-cufft-cu12
    #   nvidia-cusolver-cu12
    #   nvidia-cusparse-cu12
    #   torch
nvidia-nvshmem-cu12==3.4.5 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
nvidia-nvtx-cu12==12.8.90 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
openai==2.17.0
    # via parlant
openapi-core==0.22.0
    # via fastmcp
openapi-pydantic==0.5.1
    # via fastmcp
openapi-schema-validator==0.6.3
    # via
    #   openapi-core
    #   openapi-spec-validator
openapi-spec-validator==0.7.2
    # via
    #   openapi-core
    #   openapi3-parser
openapi3-parser==1.1.21
    # via parlant
opentelemetry-api==1.39.1
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
    #   opentelemetry-instrumentation
    #   opentelemetry-sdk
    #   opentelemetry-semantic-conventions
    #   parlant
opentelemetry-exporter-otlp==1.39.1
    # via parlant
opentelemetry-exporter-otlp-proto-common==1.39.1
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-exporter-otlp-proto-grpc==1.39.1
    # via opentelemetry-exporter-otlp
opentelemetry-exporter-otlp-proto-http==1.39.1
    # via opentelemetry-exporter-otlp
opentelemetry-instrumentation==0.60b1
    # via parlant
opentelemetry-proto==1.39.1
    # via
    #   opentelemetry-exporter-otlp-proto-common
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-sdk==1.39.1
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
    #   parlant
opentelemetry-semantic-conventions==0.60b1
    # via
    #   opentelemetry-instrumentation
    #   opentelemetry-sdk
packaging==26.0
    # via
    #   altair
    #   huggingface-hub
    #   limits
    #   matplotlib
    #   opentelemetry-instrumentation
    #   prance
    #   streamlit
pandas==2.3.3
    # via
    #   networkx
    #   streamlit
parlant==3.1.2
    # via trialpath
parlant-client==3.1.0
    # via parlant
pathable==0.4.4
    # via jsonschema-path
pathspec==1.0.4
    # via mypy
pathvalidate==3.3.1
    # via py-key-value-aio
pillow==12.1.0
    # via
    #   matplotlib
    #   streamlit
    #   trialpath
platformdirs==4.5.1
    # via fastmcp
prance==25.4.8.0
    # via openapi3-parser
propcache==0.4.1
    # via yarl
proto-plus==1.27.1
    # via google-api-core
protobuf==6.33.5
    # via
    #   google-api-core
    #   googleapis-common-protos
    #   opentelemetry-proto
    #   proto-plus
    #   streamlit
py-key-value-aio==0.2.8
    # via fastmcp
py-key-value-shared==0.2.8
    # via py-key-value-aio
pyarrow==23.0.0
    # via streamlit
pyasn1==0.6.2
    # via
    #   pyasn1-modules
    #   rsa
pyasn1-modules==0.4.2
    # via google-auth
pycparser==3.0 ; implementation_name != 'PyPy' and platform_python_implementation != 'PyPy'
    # via cffi
pydantic==2.11.9
    # via
    #   aiopenapi3
    #   fastapi
    #   fastmcp
    #   google-genai
    #   mcp
    #   openai
    #   openapi-pydantic
    #   parlant-client
    #   pydantic-settings
    #   trialpath
pydantic-core==2.33.2
    # via pydantic
pydantic-settings==2.12.0
    # via mcp
pydeck==0.9.1
    # via streamlit
pygments==2.19.2
    # via rich
pyjwt==2.11.0
    # via mcp
pyparsing==3.3.2
    # via matplotlib
pyperclip==1.11.0
    # via fastmcp
pyreadline3==3.5.4 ; sys_platform == 'win32'
    # via humanfriendly
python-dateutil==2.9.0.post0
    # via
    #   botocore
    #   croniter
    #   matplotlib
    #   pandas
    #   parlant
python-dotenv==1.2.1
    # via
    #   fastmcp
    #   parlant
    #   pydantic-settings
    #   trialpath
python-multipart==0.0.22
    # via mcp
pytz==2025.2
    # via
    #   croniter
    #   pandas
pywin32==311 ; sys_platform == 'win32'
    # via mcp
pywin32-ctypes==0.2.3 ; sys_platform == 'win32'
    # via keyring
pyyaml==6.0.3
    # via
    #   aiopenapi3
    #   huggingface-hub
    #   jsonschema-path
referencing==0.36.2
    # via
    #   jsonschema
    #   jsonschema-path
    #   jsonschema-specifications
    #   types-jsonschema
regex==2026.1.15
    # via tiktoken
requests==2.32.5
    # via
    #   google-api-core
    #   google-auth
    #   google-genai
    #   jsonschema-path
    #   opentelemetry-exporter-otlp-proto-http
    #   parlant
    #   prance
    #   streamlit
    #   tiktoken
    #   trialpath
rfc3339-validator==0.1.4
    # via openapi-schema-validator
rich==14.3.2
    # via
    #   cyclopts
    #   fastmcp
    #   parlant
    #   rich-rst
rich-rst==1.3.2
    # via cyclopts
rpds-py==0.30.0
    # via
    #   jsonschema
    #   referencing
rsa==4.9.1
    # via google-auth
ruamel-yaml==0.19.1
    # via prance
s3transfer==0.16.0
    # via boto3
scipy==1.17.0
    # via networkx
secretstorage==3.5.0 ; sys_platform == 'linux'
    # via keyring
semver==3.0.4
    # via parlant
setuptools==80.10.2 ; python_full_version >= '3.12'
    # via torch
shellingham==1.5.4
    # via huggingface-hub
six==1.17.0
    # via
    #   python-dateutil
    #   rfc3339-validator
smmap==5.0.2
    # via gitdb
sniffio==1.3.1
    # via
    #   google-genai
    #   openai
sse-starlette==3.2.0
    # via mcp
starlette==0.50.0
    # via
    #   fastapi
    #   mcp
    #   parlant
    #   sse-starlette
streamlit==1.54.0
    # via trialpath
structlog==25.5.0
    # via
    #   parlant
    #   trialpath
sympy==1.14.0
    # via torch
tabulate==0.9.0
    # via parlant
tenacity==9.1.3
    # via
    #   google-genai
    #   streamlit
tiktoken==0.12.0
    # via parlant
tokenizers==0.22.2
    # via parlant
toml==0.10.2
    # via
    #   parlant
    #   streamlit
torch==2.10.0
    # via parlant
tornado==6.5.4
    # via streamlit
tqdm==4.67.3
    # via
    #   huggingface-hub
    #   openai
triton==3.6.0 ; platform_machine == 'x86_64' and sys_platform == 'linux'
    # via torch
typer-slim==0.21.1
    # via huggingface-hub
types-aiofiles==25.1.0.20251011
    # via parlant
types-cachetools==6.2.0.20251022
    # via parlant
types-croniter==6.0.0.20250809
    # via parlant
types-jsonschema==4.26.0.20260202
    # via parlant
typing-extensions==4.15.0
    # via
    #   altair
    #   anyio
    #   exceptiongroup
    #   fastapi
    #   google-genai
    #   grpcio
    #   huggingface-hub
    #   limits
    #   mcp
    #   mypy
    #   openai
    #   openapi-core
    #   opentelemetry-api
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
    #   opentelemetry-sdk
    #   opentelemetry-semantic-conventions
    #   py-key-value-shared
    #   pydantic
    #   pydantic-core
    #   referencing
    #   starlette
    #   streamlit
    #   torch
    #   typer-slim
    #   typing-inspection
typing-inspection==0.4.2
    # via
    #   fastapi
    #   mcp
    #   pydantic
    #   pydantic-settings
tzdata==2025.3
    # via pandas
urllib3==2.6.3
    # via
    #   botocore
    #   requests
uvicorn==0.40.0
    # via
    #   mcp
    #   parlant
watchdog==6.0.0 ; sys_platform != 'darwin'
    # via streamlit
websocket-client==1.9.0
    # via parlant
websockets==15.0.1
    # via
    #   fastmcp
    #   google-genai
werkzeug==3.1.5
    # via openapi-core
wrapt==1.17.3
    # via
    #   deprecated
    #   opentelemetry-instrumentation
yarl==1.22.0
    # via aiopenapi3
zipp==3.23.0
    # via importlib-metadata