@@ -69,6 +69,8 @@ docs = [
 ]
 litellm = [
     "litellm>=1.73.1,<2.0.0",
+    # https://github.com/BerriAI/litellm/issues/13711
+    "openai<1.100.0",
 ]
 llamaapi = [
     "llama-api-client>=0.1.0,<1.0.0",
@@ -106,50 +108,7 @@ a2a = [
     "starlette>=0.46.2,<1.0.0",
 ]
 all = [
-    # anthropic
-    "anthropic>=0.21.0,<1.0.0",
-
-    # dev
-    "commitizen>=4.4.0,<5.0.0",
-    "hatch>=1.0.0,<2.0.0",
-    "moto>=5.1.0,<6.0.0",
-    "mypy>=1.15.0,<2.0.0",
-    "pre-commit>=3.2.0,<4.2.0",
-    "pytest>=8.0.0,<9.0.0",
-    "pytest-asyncio>=0.26.0,<0.27.0",
-    "pytest-cov>=4.1.0,<5.0.0",
-    "pytest-xdist>=3.0.0,<4.0.0",
-    "ruff>=0.4.4,<0.5.0",
-
-    # docs
-    "sphinx>=5.0.0,<6.0.0",
-    "sphinx-rtd-theme>=1.0.0,<2.0.0",
-    "sphinx-autodoc-typehints>=1.12.0,<2.0.0",
-
-    # litellm
-    "litellm>=1.72.6,<1.73.0",
-
-    # llama
-    "llama-api-client>=0.1.0,<1.0.0",
-
-    # mistral
-    "mistralai>=1.8.2",
-
-    # ollama
-    "ollama>=0.4.8,<1.0.0",
-
-    # openai
-    "openai>=1.68.0,<2.0.0",
-
-    # otel
-    "opentelemetry-exporter-otlp-proto-http>=1.30.0,<2.0.0",
-
-    # a2a
-    "a2a-sdk[sql]>=0.3.0,<0.4.0",
-    "uvicorn>=0.34.2,<1.0.0",
-    "httpx>=0.28.1,<1.0.0",
-    "fastapi>=0.115.12,<1.0.0",
-    "starlette>=0.46.2,<1.0.0",
+    "strands-agents[a2a,anthropic,dev,docs,litellm,llamaapi,mistral,ollama,openai,otel]",
 ]
 
 [tool.hatch.version]
@@ -161,7 +120,7 @@ features = ["anthropic", "litellm", "llamaapi", "ollama", "openai", "otel", "mis
 dependencies = [
     "mypy>=1.15.0,<2.0.0",
     "ruff>=0.11.6,<0.12.0",
-    "strands-agents @ {root:uri}"
+    "strands-agents @ {root:uri}",
 ]
 
 [tool.hatch.envs.hatch-static-analysis.scripts]
0 commit comments