From d0af2e5ade9e239b840277a6b6c954806ade678d Mon Sep 17 00:00:00 2001 From: stevef1uk Date: Tue, 13 May 2025 22:40:11 +0200 Subject: [PATCH 1/3] Rebase and latest fixes for k3s --- README.md | 232 ++++- apps/rowboat/Dockerfile | 66 +- apps/rowboat/next.config.mjs | 42 ++ apps/rowboat/package-lock.json | 692 ++++++++++++++------ apps/rowboat/package.json | 1 + apps/rowboat/server.js | 67 ++ docker-compose.yml | 81 +-- k8s/arm64/app-config.yaml | 27 + k8s/arm64/app-secrets.yaml | 28 + k8s/arm64/apply.sh | 31 + k8s/arm64/cleanup.sh | 30 + k8s/arm64/copilot-deployment.yaml | 80 +++ k8s/arm64/docs-deployment.yaml | 38 ++ k8s/arm64/mongodb-deployment.yaml | 72 ++ k8s/arm64/mongodb-vpc.yaml | 10 + k8s/arm64/openai-secret.yaml | 7 + k8s/arm64/redis-deployment.yaml | 49 ++ k8s/arm64/rowboat-agents-deployment.yaml | 74 +++ k8s/arm64/rowboat-agents-go-deployment.yaml | 119 ++++ k8s/arm64/rowboat-deployment.yaml | 154 +++++ k8s/arm64/rowboat-service.yaml | 13 + k8s/arm64/rowboat_agents-deployment.yaml | 98 +++ k8s/arm64/setup-port-forwards.sh | 24 + k8s/arm64/start_up.sh | 11 + k8s/arm64/stop-port-forwards.sh | 15 + 25 files changed, 1744 insertions(+), 317 deletions(-) create mode 100644 apps/rowboat/server.js create mode 100644 k8s/arm64/app-config.yaml create mode 100644 k8s/arm64/app-secrets.yaml create mode 100644 k8s/arm64/apply.sh create mode 100755 k8s/arm64/cleanup.sh create mode 100644 k8s/arm64/copilot-deployment.yaml create mode 100644 k8s/arm64/docs-deployment.yaml create mode 100644 k8s/arm64/mongodb-deployment.yaml create mode 100644 k8s/arm64/mongodb-vpc.yaml create mode 100644 k8s/arm64/openai-secret.yaml create mode 100644 k8s/arm64/redis-deployment.yaml create mode 100644 k8s/arm64/rowboat-agents-deployment.yaml create mode 100644 k8s/arm64/rowboat-agents-go-deployment.yaml create mode 100644 k8s/arm64/rowboat-deployment.yaml create mode 100644 k8s/arm64/rowboat-service.yaml create mode 100644 k8s/arm64/rowboat_agents-deployment.yaml create mode 100755 
k8s/arm64/setup-port-forwards.sh create mode 100755 k8s/arm64/start_up.sh create mode 100755 k8s/arm64/stop-port-forwards.sh diff --git a/README.md b/README.md index 7b7460ce2..c55358fcc 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@

Let AI build multi-agent workflows for you in minutes

-[Quickstart](#quick-start) | [Docs](https://docs.rowboatlabs.com/) | [Discord](https://discord.gg/gtbGcqF4) | [Website](https://www.rowboatlabs.com/) | [Youtube](https://www.youtube.com/@RowBoatLabs) +[Quickstart](#quick-start) | [Docs](https://docs.rowboatlabs.com/) | [Website](https://www.rowboatlabs.com/) | [Discord](https://discord.gg/jHhUKkKHn8)
@@ -22,19 +22,15 @@ Powered by OpenAI's Agents SDK, Rowboat is the fastest way to build multi-agents export OPENAI_API_KEY=your-openai-api-key ``` -2. Clone the repository and start Rowboat +2. Clone the repository and start Rowboat docker ```bash git clone git@github.com:rowboatlabs/rowboat.git cd rowboat - ./start.sh + docker-compose up --build ``` 3. Access the app at [http://localhost:3000](http://localhost:3000). -Note: We have added native RAG support including file-uploads and URL scraping. See the [RAG](https://docs.rowboatlabs.com/using_rag) section of our docs for this. - -Note: See the [Using custom LLM providers](https://docs.rowboatlabs.com/setup/#using-custom-llm-providers) section of our docs for using custom providers like OpenRouter and LiteLLM. - ## Demo #### Create a multi-agent assistant with MCP tools by chatting with Rowboat @@ -99,3 +95,225 @@ There are 2 ways to integrate with the agents you create in Rowboat Refer to [Docs](https://docs.rowboatlabs.com/) to learn how to start building agents with Rowboat. + +## Running on Kubernetes + +### Prerequisites +- Kubernetes cluster (tested with k3s on Raspberry Pi) +- kubectl configured to access your cluster +- Docker registry access (for pushing images) +- SSH access to the target server (for deployment) + +### Important Configuration Note +Before deploying, you must update the image names in the Kubernetes configuration files to use your own Docker Hub username and image names: + +1. In `k8s/arm64/rowboat_agents-deployment.yaml`: + - Change `stevef1uk/rowboat_agents:arm64` to `your-dockerhub-username/rowboat_agents:arm64` + +2. In `k8s/arm64/rowboat-deployment.yaml`: + - Change `stevef1uk/rowboat:arm64` to `your-dockerhub-username/rowboat:arm64` + +3. In `k8s/arm64/copilot-deployment.yaml`: + - Change `stevef1uk/copilot:arm64` to `your-dockerhub-username/copilot:arm64` + +Replace `your-dockerhub-username` with your actual Docker Hub username. 
These changes ensure that Kubernetes pulls the correct images from your Docker Hub repository. + +### Deployment Steps +1. Clone the repository +2. Update the following files with your specific configuration: + - `k8s/app-config.yaml`: Update environment variables + - `k8s/app-secrets.yaml`: Add your secrets (Auth0, OpenAI, etc.) + - `k8s/openai-secret.yaml`: Add your OpenAI API key + +3. Set up the OpenAI secret: + ```bash + # Generate base64-encoded API key + echo -n "your-openai-api-key" | base64 + + # Copy the output and replace the api-key value in k8s/openai-secret.yaml + # The file should look like this: + # apiVersion: v1 + # kind: Secret + # metadata: + # name: openai-secret + # type: Opaque + # data: + # api-key: "YOUR_BASE64_ENCODED_API_KEY" + ``` + +4. Apply the configurations in order: + ```bash + kubectl apply -f k8s/app-config.yaml + kubectl apply -f k8s/app-secrets.yaml + kubectl apply -f k8s/openai-secret.yaml + ``` + +5. Deploy the applications: + ```bash + kubectl apply -f k8s/redis-deployment.yaml + kubectl apply -f k8s/mongodb-deployment.yaml + kubectl apply -f k8s/rowboat_agents-deployment.yaml + kubectl apply -f k8s/copilot-deployment.yaml + kubectl apply -f k8s/rowboat-deployment.yaml + ``` + +6. Restart the deployments to ensure they pick up the new configurations: + ```bash + kubectl rollout restart deployment redis + kubectl rollout restart deployment mongodb + kubectl rollout restart deployment rowboat-agents + kubectl rollout restart deployment copilot + kubectl rollout restart deployment rowboat + ``` + +### Accessing the Application + +#### Option 1: Using Port Forwarding (Development/Testing) +To access the Rowboat UI using port forwarding: + +```bash +# Forward port 3000 to access the Rowboat UI +kubectl port-forward svc/rowboat 3000:3000 +``` + +Then access the application at http://localhost:3000 + +#### Option 2: Using Ingress (Production) +For production deployments, it's recommended to use an Ingress controller. 
Here's how to set it up: + +1. Create an Ingress configuration file `k8s/ingress.yaml`: + ```yaml + apiVersion: networking.k8s.io/v1 + kind: Ingress + metadata: + name: rowboat-ingress + annotations: + nginx.ingress.kubernetes.io/rewrite-target: / + nginx.ingress.kubernetes.io/ssl-redirect: "false" + spec: + rules: + - host: rowboat.your-domain.com # Replace with your domain + http: + paths: + - path: / + pathType: Prefix + backend: + service: + name: rowboat + port: + number: 3000 + ``` + +2. Apply the Ingress configuration: + ```bash + kubectl apply -f k8s/ingress.yaml + ``` + +3. Configure your DNS to point to your cluster's Ingress controller IP address. + +4. If using HTTPS (recommended for production): + - Add SSL certificate configuration to the Ingress + - Update the Auth0 configuration to use the HTTPS URL + - Set `ssl-redirect: "true"` in the Ingress annotations + +### Configuration + +The application uses the following configuration files: +- `k8s/app-config.yaml`: Contains environment variables and service configurations +- `k8s/app-secrets.yaml`: Contains sensitive information like API keys and Auth0 configuration + +### Troubleshooting + +1. Check deployment status: +```bash +kubectl get deployments +kubectl get pods +``` + +2. View logs: +```bash +kubectl logs deployment/rowboat +kubectl logs deployment/rowboat-agents +kubectl logs deployment/copilot +``` + +3. 
Common issues: +- If services are not accessible, verify port forwarding is running or Ingress is configured correctly +- If MongoDB connection fails, check the connection string in `app-config.yaml` +- If Auth0 authentication fails, verify the configuration in `app-secrets.yaml` +- If using Ingress, check the Ingress controller logs: + ```bash + kubectl logs -n ingress-nginx -l app.kubernetes.io/name=ingress-nginx + ``` + +### Cleanup + +To remove all Kubernetes resources: +```bash +kubectl delete -f k8s/ +``` + +## CI/CD Configuration + +The project uses Drone CI for continuous integration and deployment. The configuration is defined in `.drone.yml` and supports building and deploying to ARM64-based Kubernetes clusters. + +### Required Secrets + +The following secrets need to be configured in your Drone CI environment: + +- `docker_username`: Your Docker Hub username +- `docker_password`: Your Docker Hub password +- `SSH_USER`: Username for SSH access to the target server +- `SSH_PASSWORD`: Password for SSH access to the target server + +## Kubernetes Deployment + +### API Key Configuration + +When deploying to Kubernetes, ensure the following API keys are properly configured in `k8s/arm64/app-secrets.yaml`: + +1. **CoPilot Service Authentication** + - The `COPILOT_API_KEY` must be identical in both the Rowboat and CoPilot services + - This key is used for internal service-to-service authentication + - Example configuration: + ```yaml + COPILOT_API_KEY: "your-shared-api-key" # Must be the same value in both services + ``` + +2. **OpenAI Integration** + - `OPENAI_API_KEY`: Your OpenAI API key for GPT model access + - `PROVIDER_API_KEY`: Optional provider-specific API key if using a different LLM provider + - `PROVIDER_BASE_URL`: Optional base URL for custom LLM provider + +3. 
**Service-to-Service Communication** + - `AGENTS_API_KEY`: API key for Rowboat Agents service authentication + - Must be consistent across all services that need to communicate with the Agents service + +### Common Issues + +1. **403 Forbidden Errors** + - If you see "Streaming failed" or 403 errors in the UI, check that: + - `COPILOT_API_KEY` is identical in both Rowboat and CoPilot services + - The API key is properly set in the Kubernetes secret + - Both services have been restarted after updating the secret + +2. **Service Connectivity** + - Ensure all services can resolve each other using the Kubernetes service names: + - Rowboat → CoPilot: `http://copilot:3002` + - Rowboat → Agents: `http://rowboat-agents:3001` + - Rowboat → MongoDB: `mongodb://admin:password@mongodb:27017/rowboat?authSource=admin` + +### Updating Secrets + +To update API keys or other secrets: + +1. Edit `k8s/arm64/app-secrets.yaml` +2. Apply the changes: + ```bash + kubectl apply -f k8s/arm64/app-secrets.yaml + ``` +3. Restart the affected services: + ```bash + kubectl rollout restart deployment/copilot + kubectl rollout restart deployment/rowboat + ``` diff --git a/apps/rowboat/Dockerfile b/apps/rowboat/Dockerfile index a6737c9b8..a22719a14 100644 --- a/apps/rowboat/Dockerfile +++ b/apps/rowboat/Dockerfile @@ -1,67 +1,37 @@ # syntax=docker.io/docker/dockerfile:1 -FROM node:18-alpine AS base +FROM node:20-alpine AS builder -# Install dependencies only when needed -FROM base AS deps -# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed. 
-RUN apk add --no-cache libc6-compat WORKDIR /app -# Install dependencies based on the preferred package manager -COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* .npmrc* ./ -RUN \ - if [ -f yarn.lock ]; then yarn --frozen-lockfile; \ - elif [ -f package-lock.json ]; then npm ci; \ - elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm i --frozen-lockfile; \ - else echo "Lockfile not found." && exit 1; \ - fi +# Copy package files +COPY package.json package-lock.json ./ +# Install dependencies +RUN npm install -# Rebuild the source code only when needed -FROM base AS builder -WORKDIR /app -COPY --from=deps /app/node_modules ./node_modules +# Copy source code COPY . . -# Next.js collects completely anonymous telemetry data about general usage. -# Learn more here: https://nextjs.org/telemetry -# Uncomment the following line in case you want to disable telemetry during the build. -# ENV NEXT_TELEMETRY_DISABLED=1 +# Build the application +RUN npm run build -RUN \ - if [ -f yarn.lock ]; then yarn run build; \ - elif [ -f package-lock.json ]; then npm run build; \ - elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm run build; \ - else echo "Lockfile not found." && exit 1; \ - fi +# Production image +FROM node:20-alpine AS runner -# Production image, copy all the files and run next -FROM base AS runner WORKDIR /app -ENV NODE_ENV=production -# Uncomment the following line in case you want to disable telemetry during runtime. 
-# ENV NEXT_TELEMETRY_DISABLED=1 - -RUN addgroup --system --gid 1001 nodejs -RUN adduser --system --uid 1001 nextjs - +# Copy necessary files from builder +COPY --from=builder /app/.next/standalone ./ +COPY --from=builder /app/.next/static ./.next/static COPY --from=builder /app/public ./public -# Automatically leverage output traces to reduce image size -# https://nextjs.org/docs/advanced-features/output-file-tracing -COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ -COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static - -USER nextjs +# Set environment variables +ENV NODE_ENV=production +ENV PORT=3000 +# Expose port EXPOSE 3000 -ENV PORT=3000 - -# server.js is created by next build from the standalone output -# https://nextjs.org/docs/pages/api-reference/config/next-config-js/output -ENV HOSTNAME="0.0.0.0" -ENV PORT=3000 +# Start the server using the standalone output CMD ["node", "server.js"] \ No newline at end of file diff --git a/apps/rowboat/next.config.mjs b/apps/rowboat/next.config.mjs index 4e18a964f..0882ba327 100644 --- a/apps/rowboat/next.config.mjs +++ b/apps/rowboat/next.config.mjs @@ -1,5 +1,47 @@ /** @type {import('next').NextConfig} */ const nextConfig = { + // Disable some optimizations to reduce memory usage during build + swcMinify: true, // Enable minification + optimizeFonts: true, + productionBrowserSourceMaps: false, + + // Disable unnecessary features + images: { + unoptimized: true, // Disable image optimization + }, + + // Reduce output size + compress: true, + poweredByHeader: false, + + // App Router specific configurations + experimental: { + // Remove serverActions as it's now enabled by default + }, + + // Custom webpack configuration + webpack: (config, { isServer }) => { + // Add any custom webpack configurations here + return config; + }, + + // Ensure catch-all routes are handled correctly + pageExtensions: ['js', 'jsx', 'ts', 'tsx'], + + // Disable the default catch-all route handling + 
async rewrites() { + return [ + { + source: '/projects/:projectId/test/:appType*', + destination: '/projects/:projectId/test/:appType', + }, + { + source: '/projects/:projectId/workflow', + destination: '/projects/:projectId/workflow', + }, + ] + }, + output: 'standalone', }; diff --git a/apps/rowboat/package-lock.json b/apps/rowboat/package-lock.json index f04383916..159c7e3d4 100644 --- a/apps/rowboat/package-lock.json +++ b/apps/rowboat/package-lock.json @@ -30,6 +30,7 @@ "clsx": "^2.1.1", "date-fns": "^4.1.0", "dotenv": "^16.4.5", + "express": "^4.18.3", "framer-motion": "^11.5.4", "fuse.js": "^7.1.0", "immer": "^10.1.1", @@ -9996,6 +9997,245 @@ "node": ">=18" } }, + "node_modules/@modelcontextprotocol/sdk/node_modules/accepts": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "license": "MIT", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/body-parser": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", + "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", + "license": "MIT", + "dependencies": { + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.6.3", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/content-disposition": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + 
"license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/cookie-signature": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "license": "MIT", + "engines": { + "node": ">=6.6.0" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/express": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", + "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", + "license": "MIT", + "dependencies": { + "accepts": "^2.0.0", + "body-parser": "^2.2.0", + "content-disposition": "^1.0.0", + "content-type": "^1.0.5", + "cookie": "^0.7.1", + "cookie-signature": "^1.2.1", + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "finalhandler": "^2.1.0", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "merge-descriptors": "^2.0.0", + "mime-types": "^3.0.0", + "on-finished": "^2.4.1", + "once": "^1.4.0", + "parseurl": "^1.3.3", + "proxy-addr": "^2.0.7", + "qs": "^6.14.0", + "range-parser": "^1.2.1", + "router": "^2.2.0", + "send": "^1.1.0", + "serve-static": "^2.2.0", + "statuses": "^2.0.1", + "type-is": "^2.0.1", + "vary": "^1.1.2" + }, + "engines": { + "node": ">= 18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + 
"node_modules/@modelcontextprotocol/sdk/node_modules/finalhandler": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "license": "MIT", + "dependencies": { + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/fresh": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/media-typer": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/merge-descriptors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + 
"node_modules/@modelcontextprotocol/sdk/node_modules/mime-types": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", + "license": "MIT", + "dependencies": { + "mime-db": "^1.54.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/negotiator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/send": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", + "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", + "license": "MIT", + "dependencies": { + "debug": "^4.3.5", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^2.0.0", + "http-errors": "^2.0.0", + "mime-types": "^3.0.1", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/serve-static": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", + "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", + "license": "MIT", + "dependencies": { + "encodeurl": "^2.0.0", + 
"escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.2.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@modelcontextprotocol/sdk/node_modules/type-is": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", + "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", + "license": "MIT", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/@mongodb-js/saslprep": { "version": "1.1.8", "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.1.8.tgz", @@ -13535,31 +13775,13 @@ } }, "node_modules/accepts": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", - "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", - "dependencies": { - "mime-types": "^3.0.0", - "negotiator": "^1.0.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/accepts/node_modules/mime-db": { - "version": "1.53.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.53.0.tgz", - "integrity": "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/accepts/node_modules/mime-types": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.0.tgz", - "integrity": "sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w==", + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "license": "MIT", "dependencies": { - "mime-db": "^1.53.0" + "mime-types": "~2.1.34", + "negotiator": "0.6.3" }, "engines": { "node": 
">= 0.6" @@ -13724,6 +13946,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", + "license": "MIT" + }, "node_modules/array-includes": { "version": "3.1.8", "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", @@ -14015,28 +14243,43 @@ } }, "node_modules/body-parser": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.1.0.tgz", - "integrity": "sha512-/hPxh61E+ll0Ujp24Ilm64cykicul1ypfwjVttduAiEdtnJFvLePSrIPk+HMImtNv5270wOGCb1Tns2rybMkoQ==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "license": "MIT", "dependencies": { - "bytes": "^3.1.2", - "content-type": "^1.0.5", - "debug": "^4.4.0", - "http-errors": "^2.0.0", - "iconv-lite": "^0.5.2", - "on-finished": "^2.4.1", - "qs": "^6.14.0", - "raw-body": "^3.0.0", - "type-is": "^2.0.0" + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" }, "engines": { - "node": ">=18" + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" } }, "node_modules/body-parser/node_modules/iconv-lite": { - "version": 
"0.5.2", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.5.2.tgz", - "integrity": "sha512-kERHXvpSaB4aU3eANwidg79K8FlrN77m8G9V+0vOR3HYaRifrlwMEpT7ZBJqLSEIHnEgJTHcWK82wwLwwKwtag==", + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", "dependencies": { "safer-buffer": ">= 2.1.2 < 3" }, @@ -14044,6 +14287,42 @@ "node": ">=0.10.0" } }, + "node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/body-parser/node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/body-parser/node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", @@ -14483,9 +14762,10 @@ "dev": true }, "node_modules/content-disposition": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", - "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "license": "MIT", "dependencies": { "safe-buffer": "5.2.1" }, @@ -14516,12 +14796,10 @@ } }, "node_modules/cookie-signature": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", - "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", - "engines": { - "node": ">=6.6.0" - } + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", + "license": "MIT" }, "node_modules/cors": { "version": "2.8.5", @@ -14852,6 +15130,7 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "license": "MIT", "engines": { "node": ">= 0.8", "npm": "1.2.8000 || >= 1.4.16" @@ -15015,7 +15294,8 @@ "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" }, "node_modules/electron-to-chromium": { "version": "1.5.74", @@ -15032,6 +15312,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", 
"integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", "engines": { "node": ">= 0.8" } @@ -15308,7 +15589,8 @@ "node_modules/escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" }, "node_modules/escape-string-regexp": { "version": "4.0.0", @@ -15747,6 +16029,7 @@ "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -15784,45 +16067,49 @@ } }, "node_modules/express": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/express/-/express-5.0.1.tgz", - "integrity": "sha512-ORF7g6qGnD+YtUG9yx4DFoqCShNMmUKiXuT5oWMHiOvt/4WFbHC6yCwQMTSBMno7AqntNCAzzcnnjowRkTL9eQ==", + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "license": "MIT", "dependencies": { - "accepts": "^2.0.0", - "body-parser": "^2.0.1", - "content-disposition": "^1.0.0", + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.7.1", - "cookie-signature": "^1.2.1", - "debug": "4.3.6", + "cookie-signature": "1.0.6", + "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "^2.0.0", - "fresh": "2.0.0", + "finalhandler": "1.3.1", + "fresh": "0.5.2", "http-errors": "2.0.0", - "merge-descriptors": "^2.0.0", + 
"merge-descriptors": "1.0.3", "methods": "~1.1.2", - "mime-types": "^3.0.0", "on-finished": "2.4.1", - "once": "1.4.0", "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", - "router": "^2.0.0", "safe-buffer": "5.2.1", - "send": "^1.1.0", - "serve-static": "^2.1.0", + "send": "0.19.0", + "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", - "type-is": "^2.0.0", + "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" }, "engines": { - "node": ">= 18" + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/express-rate-limit": { @@ -15848,39 +16135,19 @@ } }, "node_modules/express/node_modules/debug": { - "version": "4.3.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", - "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/express/node_modules/mime-db": { - "version": "1.53.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.53.0.tgz", - "integrity": "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==", - "engines": { - "node": ">= 0.6" + "ms": "2.0.0" } }, - "node_modules/express/node_modules/mime-types": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.0.tgz", - "integrity": "sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w==", - "dependencies": { - "mime-db": "^1.53.0" - }, 
- "engines": { - "node": ">= 0.6" - } + "node_modules/express/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" }, "node_modules/express/node_modules/qs": { "version": "6.13.0", @@ -16014,21 +16281,38 @@ } }, "node_modules/finalhandler": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", - "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "license": "MIT", "dependencies": { - "debug": "^4.4.0", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "on-finished": "^2.4.1", - "parseurl": "^1.3.3", - "statuses": "^2.0.1" + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" }, "engines": { "node": ">= 0.8" } }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, "node_modules/find-root": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz", @@ -16191,11 +16475,12 @@ } }, 
"node_modules/fresh": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", - "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "license": "MIT", "engines": { - "node": ">= 0.8" + "node": ">= 0.6" } }, "node_modules/fs.realpath": { @@ -17118,7 +17403,8 @@ "node_modules/is-promise": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", - "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==" + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", + "license": "MIT" }, "node_modules/is-regex": { "version": "1.1.4", @@ -18061,11 +18347,12 @@ } }, "node_modules/media-typer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", - "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "license": "MIT", "engines": { - "node": ">= 0.8" + "node": ">= 0.6" } }, "node_modules/memoize-one": { @@ -18079,12 +18366,10 @@ "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" }, "node_modules/merge-descriptors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", - "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", - "engines": 
{ - "node": ">=18" - }, + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/sindresorhus" } @@ -18652,6 +18937,18 @@ "node": ">=8.6" } }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -18822,9 +19119,10 @@ "dev": true }, "node_modules/negotiator": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", - "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -19139,6 +19437,7 @@ "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", "dependencies": { "ee-first": "1.1.1" }, @@ -19421,6 +19720,7 @@ "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", "engines": { "node": ">= 0.8" } @@ -19472,12 +19772,10 @@ } }, 
"node_modules/path-to-regexp": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", - "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", - "engines": { - "node": ">=16" - } + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", + "license": "MIT" }, "node_modules/path-type": { "version": "4.0.0", @@ -19820,6 +20118,7 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", "engines": { "node": ">= 0.6" } @@ -20194,10 +20493,13 @@ } }, "node_modules/router": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/router/-/router-2.1.0.tgz", - "integrity": "sha512-/m/NSLxeYEgWNtyC+WtNHCF7jbGxOibVWKnn+1Psff4dJGOfoXP+MuC/f2CwSmyiHdOIzYnYFp4W6GxWfekaLA==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", + "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", + "license": "MIT", "dependencies": { + "debug": "^4.4.0", + "depd": "^2.0.0", "is-promise": "^4.0.0", "parseurl": "^1.3.3", "path-to-regexp": "^8.0.0" @@ -20206,6 +20508,15 @@ "node": ">= 18" } }, + "node_modules/router/node_modules/path-to-regexp": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "license": "MIT", + "engines": { + "node": ">=16" + } + }, "node_modules/rowboat-shared": { "version": "1.0.0", "resolved": 
"git+ssh://git@github.com/rowboatlabs/shared.git#1c8e722b8c5d644672a2db92f4ba5f25d8560352", @@ -20343,52 +20654,72 @@ } }, "node_modules/send": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/send/-/send-1.1.0.tgz", - "integrity": "sha512-v67WcEouB5GxbTWL/4NeToqcZiAWEq90N888fczVArY8A79J0L4FD7vj5hm3eUMua5EpoQ59wa/oovY6TLvRUA==", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "license": "MIT", "dependencies": { - "debug": "^4.3.5", - "destroy": "^1.2.0", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "etag": "^1.8.1", - "fresh": "^0.5.2", - "http-errors": "^2.0.0", - "mime-types": "^2.1.35", - "ms": "^2.1.3", - "on-finished": "^2.4.1", - "range-parser": "^1.2.1", - "statuses": "^2.0.1" + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" }, "engines": { - "node": ">= 18" + "node": ">= 0.8.0" } }, - "node_modules/send/node_modules/fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": 
"sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "license": "MIT", "engines": { - "node": ">= 0.6" + "node": ">= 0.8" } }, "node_modules/send/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" }, "node_modules/serve-static": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.1.0.tgz", - "integrity": "sha512-A3We5UfEjG8Z7VkDv6uItWw6HY2bBSBJT1KtVESn6EOoOr2jAxNhxWCLY3jDE2WcuHXByWju74ck3ZgLwL8xmA==", + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "license": "MIT", "dependencies": { - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "parseurl": "^1.3.3", - "send": "^1.0.0" + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" }, "engines": { - "node": ">= 18" + "node": ">= 0.8.0" } }, "node_modules/set-function-length": { @@ -21276,32 +21607,13 @@ } }, "node_modules/type-is": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.0.tgz", - "integrity": "sha512-gd0sGezQYCbWSbkZr75mln4YBidWUN60+devscpLF5mtRDUpiaTvKpBNrdaCvel1NdR2k6vclXybU5fBd2i+nw==", - "dependencies": { - "content-type": "^1.0.5", - "media-typer": "^1.1.0", - "mime-types": "^3.0.0" - }, - 
"engines": { - "node": ">= 0.6" - } - }, - "node_modules/type-is/node_modules/mime-db": { - "version": "1.53.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.53.0.tgz", - "integrity": "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/type-is/node_modules/mime-types": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.0.tgz", - "integrity": "sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w==", + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "license": "MIT", "dependencies": { - "mime-db": "^1.53.0" + "media-typer": "0.3.0", + "mime-types": "~2.1.24" }, "engines": { "node": ">= 0.6" diff --git a/apps/rowboat/package.json b/apps/rowboat/package.json index 77e08c9e5..f1c431bdc 100644 --- a/apps/rowboat/package.json +++ b/apps/rowboat/package.json @@ -37,6 +37,7 @@ "clsx": "^2.1.1", "date-fns": "^4.1.0", "dotenv": "^16.4.5", + "express": "^4.18.3", "framer-motion": "^11.5.4", "fuse.js": "^7.1.0", "immer": "^10.1.1", diff --git a/apps/rowboat/server.js b/apps/rowboat/server.js new file mode 100644 index 000000000..c396b41a7 --- /dev/null +++ b/apps/rowboat/server.js @@ -0,0 +1,67 @@ +import express from 'express' +import next from 'next' +import { MongoClient } from 'mongodb' +import { createServer } from 'http' + +// Create Express server +const dev = process.env.NODE_ENV !== 'production' +const app = next({ dev }) +const handle = app.getRequestHandler() +const port = parseInt(process.env.PORT || '3000', 10) +const hostname = '0.0.0.0' + +console.log(`Starting server with configuration: +- NODE_ENV: ${process.env.NODE_ENV} +- PORT: ${port} +- HOSTNAME: ${hostname} +- MONGODB_CONNECTION_STRING: 
${process.env.MONGODB_CONNECTION_STRING ? 'set' : 'not set'} +`) + +// Prepare the server +app.prepare().then(async () => { + try { + const server = express() + const mongoClient = new MongoClient(process.env.MONGODB_CONNECTION_STRING) + + try { + await mongoClient.connect() + console.log('Connected to MongoDB') + } catch (error) { + console.error('MongoDB connection error:', error) + // Don't throw here, we can still start the server + } + + // Let Next.js handle all routes + server.all('*', (req, res) => { + return handle(req, res) + }) + + const httpServer = createServer(server) + + // Add error handling for the server + httpServer.on('error', (error) => { + console.error('Server error:', error) + if (error.code === 'EADDRINUSE') { + console.error(`Port ${port} is already in use`) + process.exit(1) + } + }) + + // Add connection handling + httpServer.on('connection', (socket) => { + console.log('New connection from:', socket.remoteAddress) + }) + + // Start the server + httpServer.listen(port, hostname, () => { + console.log(`> Server is ready and listening on http://${hostname}:${port}`) + }) + + } catch (error) { + console.error('Failed to start server:', error) + process.exit(1) + } +}).catch((error) => { + console.error('Failed to prepare Next.js app:', error) + process.exit(1) +}) \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 66d53cbe6..f0384d747 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,13 +1,5 @@ version: '3.8' -volumes: - uploads: - driver: local - driver_opts: - type: none - o: bind - device: ./data/uploads - services: rowboat: build: @@ -30,10 +22,9 @@ services: - COPILOT_API_KEY=${COPILOT_API_KEY} - REDIS_URL=redis://redis:6379 - USE_RAG=${USE_RAG} - - QDRANT_URL=http://qdrant:6333 + - QDRANT_URL=${QDRANT_URL} - QDRANT_API_KEY=${QDRANT_API_KEY} - USE_RAG_UPLOADS=${USE_RAG_UPLOADS} - - USE_RAG_S3_UPLOADS=${USE_RAG_S3_UPLOADS} - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} - 
AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - RAG_UPLOADS_S3_BUCKET=${RAG_UPLOADS_S3_BUCKET} @@ -46,11 +37,7 @@ services: - MAX_QUERIES_PER_MINUTE=${MAX_QUERIES_PER_MINUTE} - MAX_PROJECTS_PER_USER=${MAX_PROJECTS_PER_USER} - VOICE_API_URL=${VOICE_API_URL} - - PROVIDER_DEFAULT_MODEL=${PROVIDER_DEFAULT_MODEL} - - RAG_UPLOADS_DIR=/app/uploads restart: unless-stopped - volumes: - - uploads:/app/uploads rowboat_agents: build: @@ -63,13 +50,8 @@ services: - API_KEY=${AGENTS_API_KEY} - REDIS_URL=redis://redis:6379 - MONGODB_URI=mongodb://mongo:27017/rowboat - - QDRANT_URL=http://qdrant:6333 + - QDRANT_URL=${QDRANT_URL} - QDRANT_API_KEY=${QDRANT_API_KEY} - - PROVIDER_BASE_URL=${PROVIDER_BASE_URL} - - PROVIDER_API_KEY=${PROVIDER_API_KEY} - - PROVIDER_DEFAULT_MODEL=${PROVIDER_DEFAULT_MODEL} - - MAX_CALLS_PER_CHILD_AGENT=${MAX_CALLS_PER_CHILD_AGENT} - - ENABLE_TRACING=${ENABLE_TRACING} restart: unless-stopped copilot: @@ -81,10 +63,6 @@ services: environment: - OPENAI_API_KEY=${OPENAI_API_KEY} - API_KEY=${COPILOT_API_KEY} - - PROVIDER_BASE_URL=${PROVIDER_BASE_URL} - - PROVIDER_API_KEY=${PROVIDER_API_KEY} - - PROVIDER_DEFAULT_MODEL=${PROVIDER_DEFAULT_MODEL} - - PROVIDER_COPILOT_MODEL=${PROVIDER_COPILOT_MODEL} restart: unless-stopped # tools_webhook: @@ -111,28 +89,21 @@ services: build: context: ./apps/rowboat dockerfile: scripts.Dockerfile - command: ["sh", "-c", "npm run setupQdrant"] + command: ["sh", "-c", "npm run setupQdrant && echo 'index created successfully'"] profiles: [ "setup_qdrant" ] - depends_on: - qdrant: - condition: service_healthy environment: - - QDRANT_URL=http://qdrant:6333 + - QDRANT_URL=${QDRANT_URL} - QDRANT_API_KEY=${QDRANT_API_KEY} - - EMBEDDING_VECTOR_SIZE=${EMBEDDING_VECTOR_SIZE} restart: no delete_qdrant: build: context: ./apps/rowboat dockerfile: scripts.Dockerfile - command: ["sh", "-c", "npm run deleteQdrant"] + command: ["sh", "-c", "npm run deleteQdrant && echo 'index deleted successfully'"] profiles: [ "delete_qdrant" ] - depends_on: - 
qdrant: - condition: service_healthy environment: - - QDRANT_URL=http://qdrant:6333 + - QDRANT_URL=${QDRANT_URL} - QDRANT_API_KEY=${QDRANT_API_KEY} restart: no @@ -144,23 +115,15 @@ services: profiles: [ "rag_files_worker" ] environment: - OPENAI_API_KEY=${OPENAI_API_KEY} - - EMBEDDING_PROVIDER_BASE_URL=${EMBEDDING_PROVIDER_BASE_URL} - - EMBEDDING_PROVIDER_API_KEY=${EMBEDDING_PROVIDER_API_KEY} - - EMBEDDING_MODEL=${EMBEDDING_MODEL} - MONGODB_CONNECTION_STRING=mongodb://mongo:27017/rowboat - - REDIS_URL=redis://redis:6379 - GOOGLE_API_KEY=${GOOGLE_API_KEY} - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - RAG_UPLOADS_S3_BUCKET=${RAG_UPLOADS_S3_BUCKET} - RAG_UPLOADS_S3_REGION=${RAG_UPLOADS_S3_REGION} - - QDRANT_URL=http://qdrant:6333 + - QDRANT_URL=${QDRANT_URL} - QDRANT_API_KEY=${QDRANT_API_KEY} - - RAG_UPLOADS_DIR=/app/uploads - - USE_GEMINI_FILE_PARSING=${USE_GEMINI_FILE_PARSING} restart: unless-stopped - volumes: - - uploads:/app/uploads rag_urls_worker: build: @@ -170,13 +133,9 @@ services: profiles: [ "rag_urls_worker" ] environment: - OPENAI_API_KEY=${OPENAI_API_KEY} - - EMBEDDING_PROVIDER_BASE_URL=${EMBEDDING_PROVIDER_BASE_URL} - - EMBEDDING_PROVIDER_API_KEY=${EMBEDDING_PROVIDER_API_KEY} - - EMBEDDING_MODEL=${EMBEDDING_MODEL} - MONGODB_CONNECTION_STRING=mongodb://mongo:27017/rowboat - - REDIS_URL=redis://redis:6379 - FIRECRAWL_API_KEY=${FIRECRAWL_API_KEY} - - QDRANT_URL=http://qdrant:6333 + - QDRANT_URL=${QDRANT_URL} - QDRANT_API_KEY=${QDRANT_API_KEY} restart: unless-stopped @@ -188,12 +147,8 @@ services: profiles: [ "rag_text_worker" ] environment: - OPENAI_API_KEY=${OPENAI_API_KEY} - - EMBEDDING_PROVIDER_BASE_URL=${EMBEDDING_PROVIDER_BASE_URL} - - EMBEDDING_PROVIDER_API_KEY=${EMBEDDING_PROVIDER_API_KEY} - - EMBEDDING_MODEL=${EMBEDDING_MODEL} - MONGODB_CONNECTION_STRING=mongodb://mongo:27017/rowboat - - REDIS_URL=redis://redis:6379 - - QDRANT_URL=http://qdrant:6333 + - QDRANT_URL=${QDRANT_URL} - 
QDRANT_API_KEY=${QDRANT_API_KEY} restart: unless-stopped @@ -244,21 +199,3 @@ services: # - ROWBOAT_API_HOST=http://rowboat:3000 # - MONGODB_URI=mongodb://mongo:27017/rowboat # restart: unless-stopped - - qdrant: - build: - context: . - dockerfile: Dockerfile.qdrant - ports: - - "6333:6333" - environment: - - QDRANT__STORAGE__STORAGE_PATH=/data/qdrant - restart: unless-stopped - profiles: [ "qdrant" ] - volumes: - - ./data/qdrant:/data/qdrant - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:6333/healthz"] - interval: 5s - timeout: 10s - retries: 3 diff --git a/k8s/arm64/app-config.yaml b/k8s/arm64/app-config.yaml new file mode 100644 index 000000000..fa25956d5 --- /dev/null +++ b/k8s/arm64/app-config.yaml @@ -0,0 +1,27 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: app-config +data: + REDIS_URL: "redis://redis:6379" + MONGODB_CONNECTION_STRING: "mongodb://admin:password@mongodb:27017/rowboat?authSource=admin" + AUTH0_DOMAIN: "" + AUTH0_AUDIENCE: "" + AUTH0_BASE_URL: "" + AUTH0_ISSUER_BASE_URL: "" + USE_AUTH: "false" + USE_RAG: "true" + AGENTS_API_URL: "http://rowboat-agents:3001" + COPILOT_API_URL: "http://copilot:3002" + USE_RAG_UPLOADS: "true" + USE_RAG_SCRAPING: "true" + USE_CHAT_WIDGET: "false" + CHAT_WIDGET_HOST: "http://localhost:3006" + MAX_QUERIES_PER_MINUTE: "60" + MAX_PROJECTS_PER_USER: "10" + VOICE_API_URL: "" + PROVIDER_DEFAULT_MODEL: "gpt-4" + PROVIDER_COPILOT_MODEL: "gpt-4" + PROVIDER_BASE_URL: "" + MAX_CALLS_PER_CHILD_AGENT: "10" + ENABLE_TRACING: "false" diff --git a/k8s/arm64/app-secrets.yaml b/k8s/arm64/app-secrets.yaml new file mode 100644 index 000000000..eaffd532a --- /dev/null +++ b/k8s/arm64/app-secrets.yaml @@ -0,0 +1,28 @@ +apiVersion: v1 +kind: Secret +metadata: + name: app-secrets +type: Opaque +stringData: + # Find these in your Auth0 application settings: + # 1. Client ID: At the top of your application settings page + # 2. Client Secret: Click "Show" next to Client Secret + # 3. 
Generate a random secret: Run this command in your terminal: + # openssl rand -hex 32 + AUTH0_CLIENT_ID: "" + AUTH0_CLIENT_SECRET: "-E3" + AUTH0_SECRET: "" + # API Keys and sensitive configuration + OPENAI_API_KEY: "" # Add your OpenAI API key + AGENTS_API_KEY: "" # Add your agents API key + COPILOT_API_KEY: "" # Add your copilot API key + QDRANT_URL: "http://localhost:6333" # Add your Qdrant URL + QDRANT_API_KEY: "" # Add your Qdrant API key + AWS_ACCESS_KEY_ID: "" # Add your AWS access key + AWS_SECRET_ACCESS_KEY: "" # Add your AWS secret key + RAG_UPLOADS_S3_BUCKET: "" # Add your S3 bucket name + RAG_UPLOADS_S3_REGION: "" # Add your S3 region + FIRECRAWL_API_KEY: "" # Add your Firecrawl API key + CHAT_WIDGET_SESSION_JWT_SECRET: "" # Add your JWT secret + PROVIDER_API_KEY: "" # Add your provider API key + # Removed AUTH0_BASE_URL and AUTH0_ISSUER_BASE_URL as they are non-sensitive and already in app-config.yaml diff --git a/k8s/arm64/apply.sh b/k8s/arm64/apply.sh new file mode 100644 index 000000000..f1ee57f28 --- /dev/null +++ b/k8s/arm64/apply.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +# Apply all configurations +kubectl apply -f app-config.yaml +kubectl apply -f app-secrets.yaml +kubectl apply -f openai-secret.yaml +kubectl apply -f copilot-deployment.yaml +kubectl apply -f mongodb-vpc.yaml +kubectl apply -f mongodb-deployment.yaml +kubectl apply -f redis-deployment.yaml +kubectl apply -f rowboat-agents-go-deployment.yaml +kubectl apply -f rowboat-deployment.yaml + +# Wait for 2 seconds before starting restarts +sleep 2 + +# Restart deployments with delays between each +echo "Restarting redis deployment..." +kubectl rollout restart deployment/redis +sleep 2 + +echo "Restarting rowboat-agents deployment..." +kubectl rollout restart deployment/rowboat-agents +sleep 2 + +echo "Restarting copilot deployment..." +kubectl rollout restart deployment/copilot +sleep 2 + +echo "Restarting rowboat deployment..." 
+kubectl rollout restart deployment/rowboat \ No newline at end of file diff --git a/k8s/arm64/cleanup.sh b/k8s/arm64/cleanup.sh new file mode 100755 index 000000000..c1acdb658 --- /dev/null +++ b/k8s/arm64/cleanup.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +echo "Starting cleanup of Kubernetes resources..." + +# Delete all deployments +echo "Deleting deployments..." +kubectl delete deployment rowboat +kubectl delete deployment rowboat-agents +kubectl delete deployment copilot +kubectl delete deployment docs +kubectl delete deployment redis +#kubectl delete deployment mongodb + +# Delete all secrets +echo "Deleting secrets..." +kubectl delete secret app-secrets +kubectl delete secret openai-secret + +# Delete all configmaps +echo "Deleting configmaps..." +kubectl delete configmap app-config + +# Wait for pods to terminate +echo "Waiting for pods to terminate..." +while kubectl get pods | grep -q "Terminating"; do + echo "Pods still terminating..." + sleep 5 +done + +echo "Cleanup complete!" diff --git a/k8s/arm64/copilot-deployment.yaml b/k8s/arm64/copilot-deployment.yaml new file mode 100644 index 000000000..00c69e025 --- /dev/null +++ b/k8s/arm64/copilot-deployment.yaml @@ -0,0 +1,80 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: copilot + labels: + app: copilot +spec: + replicas: 1 + selector: + matchLabels: + app: copilot + template: + metadata: + labels: + app: copilot + spec: + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: rowboat-cluster + operator: In + values: + - "true" + containers: + - name: copilot + image: stevef1uk/copilot:arm64 + ports: + - containerPort: 3002 + env: + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: OPENAI_API_KEY + - name: API_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: COPILOT_API_KEY + - name: PROVIDER_BASE_URL + valueFrom: + configMapKeyRef: + name: app-config + key: PROVIDER_BASE_URL + - name: 
PROVIDER_API_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: PROVIDER_API_KEY + - name: PROVIDER_DEFAULT_MODEL + valueFrom: + configMapKeyRef: + name: app-config + key: PROVIDER_DEFAULT_MODEL + - name: PROVIDER_COPILOT_MODEL + valueFrom: + configMapKeyRef: + name: app-config + key: PROVIDER_COPILOT_MODEL + resources: + requests: + cpu: "500m" + memory: "512Mi" + limits: + cpu: "1" + memory: "1Gi" +--- +apiVersion: v1 +kind: Service +metadata: + name: copilot +spec: + selector: + app: copilot + ports: + - port: 3002 + targetPort: 3002 + type: ClusterIP \ No newline at end of file diff --git a/k8s/arm64/docs-deployment.yaml b/k8s/arm64/docs-deployment.yaml new file mode 100644 index 000000000..7f8bcfd3d --- /dev/null +++ b/k8s/arm64/docs-deployment.yaml @@ -0,0 +1,38 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: docs +spec: + replicas: 1 + selector: + matchLabels: + app: docs + template: + metadata: + labels: + app: docs + spec: + containers: + - name: docs + image: your-registry/docs:arm64 + ports: + - containerPort: 80 + resources: + requests: + cpu: 100m + memory: 128Mi + limits: + cpu: 500m + memory: 256Mi +--- +apiVersion: v1 +kind: Service +metadata: + name: docs +spec: + selector: + app: docs + ports: + - port: 80 + targetPort: 80 + type: ClusterIP \ No newline at end of file diff --git a/k8s/arm64/mongodb-deployment.yaml b/k8s/arm64/mongodb-deployment.yaml new file mode 100644 index 000000000..9bdfb78eb --- /dev/null +++ b/k8s/arm64/mongodb-deployment.yaml @@ -0,0 +1,72 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mongodb + labels: + app: mongodb +spec: + replicas: 1 + selector: + matchLabels: + app: mongodb + template: + metadata: + labels: + app: mongodb + spec: + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: rowboat-cluster + operator: In + values: + - "true" + containers: + - name: mongodb + image: mongo:4.4.18 + ports: + - 
containerPort: 27017 + resources: + requests: + cpu: "100m" + memory: "256Mi" + limits: + cpu: "200m" + memory: "512Mi" + env: + - name: MONGO_INITDB_ROOT_USERNAME + value: "admin" + - name: MONGO_INITDB_ROOT_PASSWORD + value: "password" + volumeMounts: + - name: mongodb-data + mountPath: /data/db + volumes: + - name: mongodb-data + persistentVolumeClaim: + claimName: mongodb-pvc +--- +apiVersion: v1 +kind: Service +metadata: + name: mongodb +spec: + selector: + app: mongodb + ports: + - port: 27017 + targetPort: 27017 + type: ClusterIP +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: mongodb-pvc +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 1Gi diff --git a/k8s/arm64/mongodb-vpc.yaml b/k8s/arm64/mongodb-vpc.yaml new file mode 100644 index 000000000..822f07151 --- /dev/null +++ b/k8s/arm64/mongodb-vpc.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: mongodb-pvc +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 1Gi diff --git a/k8s/arm64/openai-secret.yaml b/k8s/arm64/openai-secret.yaml new file mode 100644 index 000000000..537f858a1 --- /dev/null +++ b/k8s/arm64/openai-secret.yaml @@ -0,0 +1,7 @@ +apiVersion: v1 +kind: Secret +metadata: + name: openai-secret +type: Opaque +data: + api-key: # Base 64 OpenAPI key diff --git a/k8s/arm64/redis-deployment.yaml b/k8s/arm64/redis-deployment.yaml new file mode 100644 index 000000000..36730d005 --- /dev/null +++ b/k8s/arm64/redis-deployment.yaml @@ -0,0 +1,49 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: redis + labels: + app: redis +spec: + replicas: 1 + selector: + matchLabels: + app: redis + template: + metadata: + labels: + app: redis + spec: + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: rowboat-cluster + operator: In + values: + - "true" + containers: + - name: redis + image: redis:7.2-alpine + ports: + - 
containerPort: 6379 + resources: + requests: + cpu: "100m" + memory: "128Mi" + limits: + cpu: "200m" + memory: "256Mi" +--- +apiVersion: v1 +kind: Service +metadata: + name: redis +spec: + selector: + app: redis + ports: + - port: 6379 + targetPort: 6379 + type: ClusterIP \ No newline at end of file diff --git a/k8s/arm64/rowboat-agents-deployment.yaml b/k8s/arm64/rowboat-agents-deployment.yaml new file mode 100644 index 000000000..a0124fbbb --- /dev/null +++ b/k8s/arm64/rowboat-agents-deployment.yaml @@ -0,0 +1,74 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: rowboat-agents + labels: + app: rowboat-agents +spec: + replicas: 1 + selector: + matchLabels: + app: rowboat-agents + template: + metadata: + labels: + app: rowboat-agents + spec: + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: rowboat-cluster + operator: In + values: + - "true" + containers: + - name: rowboat-agents + image: stevef1uk/rowboat-agents:arm64 + ports: + - containerPort: 3001 + env: + - name: NODE_ENV + value: "production" + - name: PORT + value: "3001" + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: OPENAI_API_KEY + - name: PROVIDER_BASE_URL + valueFrom: + configMapKeyRef: + name: app-config + key: PROVIDER_BASE_URL + - name: PROVIDER_API_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: PROVIDER_API_KEY + - name: PROVIDER_DEFAULT_MODEL + valueFrom: + configMapKeyRef: + name: app-config + key: PROVIDER_DEFAULT_MODEL + resources: + requests: + memory: "512Mi" + cpu: "500m" + limits: + memory: "1Gi" + cpu: "1000m" +--- +apiVersion: v1 +kind: Service +metadata: + name: rowboat-agents +spec: + selector: + app: rowboat-agents + ports: + - port: 3001 + targetPort: 3001 + type: ClusterIP \ No newline at end of file diff --git a/k8s/arm64/rowboat-agents-go-deployment.yaml b/k8s/arm64/rowboat-agents-go-deployment.yaml new file mode 100644 index 000000000..2795d9ffe 
--- /dev/null +++ b/k8s/arm64/rowboat-agents-go-deployment.yaml @@ -0,0 +1,119 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: rowboat-agents + labels: + app: rowboat-agents +spec: + replicas: 1 + selector: + matchLabels: + app: rowboat-agents + template: + metadata: + labels: + app: rowboat-agents + spec: + containers: + - name: rowboat-agents + image: stevef1uk/rowboat_agents_go:arm64 + imagePullPolicy: Always + ports: + - containerPort: 3001 + env: + - name: PORT + value: "3001" + - name: MONGODB_CONNECTION_STRING + value: "mongodb://admin:password@mongodb:27017/rowboat?authSource=admin" + - name: REDIS_URL + valueFrom: + configMapKeyRef: + name: app-config + key: REDIS_URL + - name: AUTH0_DOMAIN + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_DOMAIN + - name: AUTH0_AUDIENCE + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_AUDIENCE + - name: AUTH0_BASE_URL + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_BASE_URL + - name: AUTH0_ISSUER_BASE_URL + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_ISSUER_BASE_URL + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: openai-secret + key: api-key + - name: ENABLE_AGENT_SWITCHING + value: "false" + - name: DEFAULT_AGENT + value: "Example Agent" + - name: AGENT_CONFIGS + value: | + [{ + "name": "Example Agent", + "type": "conversation", + "description": "An example agent", + "instructions": "## šŸ§‘ā€ Role:\nYou are an helpful customer support assistant\n\n---\n## āš™ļø Steps to Follow:\n1. Ask the user what they would like help with\n2. 
Ask the user for their email address and let them know someone will contact them soon.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Asking the user their issue\n- Getting their email\n\nāŒ Out of Scope:\n- Questions unrelated to customer support\n- If a question is out of scope, politely inform the user and avoid providing an answer.\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- ask user their issue\n\nāŒ Don'ts:\n- don't ask user any other detail than email", + "model": "gpt-4o-2024-08-06", + "controlType": "retain", + "tools": [] + }] + - name: AGENT_CONNECTIONS + value: '{}' + - name: PROVIDER_BASE_URL + valueFrom: + configMapKeyRef: + name: app-config + key: PROVIDER_BASE_URL + - name: PROVIDER_API_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: PROVIDER_API_KEY + - name: PROVIDER_DEFAULT_MODEL + valueFrom: + configMapKeyRef: + name: app-config + key: PROVIDER_DEFAULT_MODEL + - name: MAX_CALLS_PER_CHILD_AGENT + valueFrom: + configMapKeyRef: + name: app-config + key: MAX_CALLS_PER_CHILD_AGENT + - name: ENABLE_TRACING + valueFrom: + configMapKeyRef: + name: app-config + key: ENABLE_TRACING + resources: + requests: + memory: "256Mi" + cpu: "250m" + limits: + memory: "512Mi" + cpu: "500m" +--- +apiVersion: v1 +kind: Service +metadata: + name: rowboat-agents +spec: + selector: + app: rowboat-agents + ports: + - port: 3001 + targetPort: 3001 + type: ClusterIP +--- diff --git a/k8s/arm64/rowboat-deployment.yaml b/k8s/arm64/rowboat-deployment.yaml new file mode 100644 index 000000000..27928ddfe --- /dev/null +++ b/k8s/arm64/rowboat-deployment.yaml @@ -0,0 +1,154 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: rowboat + labels: + app: rowboat +spec: + replicas: 1 + selector: + matchLabels: + app: rowboat + template: + metadata: + labels: + app: rowboat + spec: + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: rowboat-cluster + operator: In + values: + - "true" + 
containers: + - name: rowboat + image: stevef1uk/rowboat:arm64 + imagePullPolicy: Always + ports: + - containerPort: 3000 + env: + - name: NODE_ENV + value: "production" + - name: DEBUG + value: "next:*,mongodb:*,express:*,app:*,MongoClient:*,MongoDB:*" + - name: MONGODB_DEBUG + value: "true" + - name: MONGODB_DRIVER_DEBUG + value: "true" + - name: HOSTNAME + value: "0.0.0.0" + - name: PORT + value: "3000" + - name: REDIS_URL + valueFrom: + configMapKeyRef: + name: app-config + key: REDIS_URL + - name: MONGODB_CONNECTION_STRING + valueFrom: + configMapKeyRef: + name: app-config + key: MONGODB_CONNECTION_STRING + - name: AUTH0_DOMAIN + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_DOMAIN + - name: AUTH0_AUDIENCE + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_AUDIENCE + - name: AUTH0_BASE_URL + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_BASE_URL + - name: AUTH0_ISSUER_BASE_URL + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_ISSUER_BASE_URL + - name: AUTH0_CLIENT_ID + valueFrom: + secretKeyRef: + name: app-secrets + key: AUTH0_CLIENT_ID + - name: AUTH0_CLIENT_SECRET + valueFrom: + secretKeyRef: + name: app-secrets + key: AUTH0_CLIENT_SECRET + - name: AUTH0_SECRET + valueFrom: + secretKeyRef: + name: app-secrets + key: AUTH0_SECRET + - name: AGENTS_API_URL + value: "http://rowboat-agents:3001" + - name: AGENTS_API_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: AGENTS_API_KEY + - name: COPILOT_API_URL + value: "http://copilot:3002" + - name: COPILOT_API_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: COPILOT_API_KEY + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: OPENAI_API_KEY + - name: ENABLE_AGENT_SWITCHING + value: "true" + - name: DEFAULT_AGENT + value: "Example Agent" + - name: AGENT_SWITCHING_RULES + value: '{"Story Agent": {"keywords": ["story", "tell me a", "write a", "narrative", "tale"]}}' + - name: USE_AUTH + valueFrom: + 
configMapKeyRef: + name: app-config + key: USE_AUTH + - name: PROVIDER_DEFAULT_MODEL + valueFrom: + configMapKeyRef: + name: app-config + key: PROVIDER_DEFAULT_MODEL + - name: PROVIDER_BASE_URL + valueFrom: + configMapKeyRef: + name: app-config + key: PROVIDER_BASE_URL + - name: PROVIDER_API_KEY + valueFrom: + secretKeyRef: + name: app-secrets + key: PROVIDER_API_KEY + resources: + requests: + memory: "512Mi" + cpu: "500m" + limits: + memory: "1Gi" + cpu: "1000m" +--- +apiVersion: v1 +kind: Service +metadata: + name: rowboat +spec: + selector: + app: rowboat + ports: + - port: 3000 + targetPort: 3000 + nodePort: 30000 + type: NodePort \ No newline at end of file diff --git a/k8s/arm64/rowboat-service.yaml b/k8s/arm64/rowboat-service.yaml new file mode 100644 index 000000000..b68ccfe50 --- /dev/null +++ b/k8s/arm64/rowboat-service.yaml @@ -0,0 +1,13 @@ +apiVersion: v1 +kind: Service +metadata: + name: rowboat-service + namespace: default +spec: + selector: + app: rowboat # This should match the labels in your Rowboat pod/deployment + ports: + - protocol: TCP + port: 3000 + targetPort: 3000 # This should match the port your Rowboat app is listening on + type: ClusterIP # This makes the service only accessible within the cluster diff --git a/k8s/arm64/rowboat_agents-deployment.yaml b/k8s/arm64/rowboat_agents-deployment.yaml new file mode 100644 index 000000000..fe5075d1c --- /dev/null +++ b/k8s/arm64/rowboat_agents-deployment.yaml @@ -0,0 +1,98 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: rowboat-agents + labels: + app: rowboat-agents +spec: + replicas: 1 + selector: + matchLabels: + app: rowboat-agents + template: + metadata: + labels: + app: rowboat-agents + spec: + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: rowboat-cluster + operator: In + values: + - "true" + containers: + - name: rowboat-agents + image: stevef1uk/rowboat_agents:arm64 + ports: + - containerPort: 
3001 + env: + - name: PORT + value: "3001" + - name: MONGODB_CONNECTION_STRING + value: "mongodb://admin:password@mongodb:27017/rowboat?authSource=admin" + - name: REDIS_URL + value: "redis://redis:6379" + - name: AUTH0_DOMAIN + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_DOMAIN + - name: AUTH0_AUDIENCE + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_AUDIENCE + - name: AUTH0_BASE_URL + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_BASE_URL + - name: AUTH0_ISSUER_BASE_URL + valueFrom: + configMapKeyRef: + name: app-config + key: AUTH0_ISSUER_BASE_URL + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: openai-secret + key: api-key + - name: ENABLE_AGENT_SWITCHING + value: "false" + - name: DEFAULT_AGENT + value: "Example Agent" + - name: AGENT_CONFIGS + value: | + [{ + "name": "Example Agent", + "type": "conversation", + "description": "An example agent", + "instructions": "## šŸ§‘ā€ Role:\nYou are an helpful customer support assistant\n\n---\n## āš™ļø Steps to Follow:\n1. Ask the user what they would like help with\n2. 
Ask the user for their email address and let them know someone will contact them soon.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Asking the user their issue\n- Getting their email\n\nāŒ Out of Scope:\n- Questions unrelated to customer support\n- If a question is out of scope, politely inform the user and avoid providing an answer.\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- ask user their issue\n\nāŒ Don'ts:\n- don't ask user any other detail than email", + "model": "gpt-4o-2024-08-06", + "controlType": "retain", + "tools": [] + }] + - name: AGENT_CONNECTIONS + value: '{}' + resources: + limits: + cpu: "1" + memory: "1Gi" + requests: + cpu: "500m" + memory: "512Mi" +--- +apiVersion: v1 +kind: Service +metadata: + name: rowboat-agents +spec: + selector: + app: rowboat-agents + ports: + - port: 3001 + targetPort: 3001 + type: ClusterIP \ No newline at end of file diff --git a/k8s/arm64/setup-port-forwards.sh b/k8s/arm64/setup-port-forwards.sh new file mode 100755 index 000000000..824204c62 --- /dev/null +++ b/k8s/arm64/setup-port-forwards.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +# Function to check if a port is already in use +check_port() { + if lsof -i :$1 > /dev/null 2>&1; then + echo "Port $1 is already in use. Please free up this port and try again." + exit 1 + fi +} + +# Check all ports before starting +echo "Checking if ports are available..." +check_port 3000 + +# Start port forwards in the background +echo "Setting up port forwards..." +kubectl port-forward service/rowboat 3000:3000 & +echo $! > port-forwards.pid + + +echo "Port forwards are running. 
You can access:" +echo "- Main UI: http://localhost:3000" +echo "" +echo "To stop the port forwards, run: ./stop-port-forwards.sh" diff --git a/k8s/arm64/start_up.sh b/k8s/arm64/start_up.sh new file mode 100755 index 000000000..f79696839 --- /dev/null +++ b/k8s/arm64/start_up.sh @@ -0,0 +1,11 @@ +kubectl apply -f app-config.yaml +kubectl apply -f app-secrets.yaml +kubectl apply -f openai-secret.yaml +kubectl apply -f copilot-deployment.yaml +kubectl apply -f mongodb-vpc.yaml +kubectl apply -f mongodb-deployment.yaml +kubectl apply -f redis-deployment.yaml +#kubectl apply -f rowboat-agents-go-deployment.yaml +kubectl apply -f rowboat_agents-deployment.yaml +sleep 4 +kubectl apply -f rowboat-deployment.yaml diff --git a/k8s/arm64/stop-port-forwards.sh b/k8s/arm64/stop-port-forwards.sh new file mode 100755 index 000000000..93d155383 --- /dev/null +++ b/k8s/arm64/stop-port-forwards.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +echo "Stopping all kubectl port-forward processes..." +pkill -f "kubectl port-forward" + +echo "Stopping processes on port 3000..." +# Find processes using port 3000 and kill them +lsof -ti:3000 | xargs kill -9 2>/dev/null || true + +# Also try to clean up any PID file if it exists +if [ -f port-forwards.pid ]; then + rm port-forwards.pid +fi + +echo "All port forwards and processes on port 3000 have been stopped." 
\ No newline at end of file From d098b426f0379f3ad51971457bb358c563e5119f Mon Sep 17 00:00:00 2001 From: stevef1uk Date: Wed, 28 May 2025 09:13:01 +0200 Subject: [PATCH 2/3] working omn k8s with litellm deployed on k8s too --- README.md | 1 + apps/copilot/client.py | 4 +- apps/docs/docs/using_rag.md | 53 +- apps/rowboat/Dockerfile | 66 +- .../app/projects/[projectId]/config/app.tsx | 2 +- .../[projectId]/config/components/tools.tsx | 2 +- apps/rowboat/next.config.mjs | 42 - apps/rowboat/package-lock.json | 692 +-- apps/rowboat/package.json | 1 - apps/rowboat/server.js | 67 - apps/rowboat_agents/Dockerfile | 26 +- apps/rowboat_agents/pyproject.toml | 5 +- apps/rowboat_agents/requirements.txt | 6 +- apps/rowboat_agents/src/graph/core.py | 19 +- apps/rowboat_agents/src/graph/execute_turn.py | 387 +- apps/rowboat_agents/src/utils/client.py | 9 +- apps/rowboat_agents/src/utils/common.py | 88 +- .../src/utils/model_provider.py | 174 + custom_handler.py | 990 ++++ docker-compose.yml | 81 +- k8s/arm64/app-config.yaml | 16 +- k8s/arm64/app-secrets.yaml | 12 +- k8s/arm64/apply.sh | 31 - k8s/arm64/copilot-deployment.yaml | 7 +- k8s/arm64/openai-secret.yaml | 2 +- k8s/arm64/rowboat-agents-deployment.yaml | 29 +- k8s/arm64/rowboat-agents-go-deployment.yaml | 2 +- k8s/arm64/rowboat_agents-deployment.yaml | 63 +- k8s/arm64/start_up.sh | 6 +- litellm/Dockerfile | 34 + litellm/README.md | 539 +++ litellm/build.sh | 25 + litellm/config.yaml | 70 + litellm/k8s/configmap.yaml | 80 + litellm/k8s/deployment.yaml | 123 + litellm/k8s/kustomization.yaml | 11 + litellm/k8s/secrets.yaml | 30 + litellm/k8s/service.yaml | 14 + litellm/litellm/__init__.py | 0 litellm/litellm/custom_handler.py | 917 ++++ litellm/requirements.txt | 2 + litellm/schema.prisma | 17 + litellm/setup.py | 16 + litellm/test_modal.py | 51 + litellm/test_modal_handler.py | 409 ++ rowboat_agents/.dockerignore | 5 + rowboat_agents/.env.example | 2 + rowboat_agents/.gitignore | 40 + rowboat_agents/Dockerfile | 28 + 
rowboat_agents/NOTICE.md | 43 + rowboat_agents/README.md | 114 + rowboat_agents/__init__.py | 0 rowboat_agents/configs/default_config.json | 11 + rowboat_agents/poetry.lock | 3996 +++++++++++++++++ rowboat_agents/pyproject.toml | 110 + rowboat_agents/requirements.txt | 153 + rowboat_agents/src/__init__.py | 0 rowboat_agents/src/app/__init__.py | 0 rowboat_agents/src/app/main.py | 184 + rowboat_agents/src/graph/__init__.py | 0 rowboat_agents/src/graph/core.py | 647 +++ rowboat_agents/src/graph/execute_turn.py | 730 +++ rowboat_agents/src/graph/guardrails.py | 218 + rowboat_agents/src/graph/helpers/access.py | 48 + rowboat_agents/src/graph/helpers/control.py | 48 + .../src/graph/helpers/instructions.py | 44 + .../src/graph/helpers/library_tools.py | 268 ++ rowboat_agents/src/graph/helpers/state.py | 60 + rowboat_agents/src/graph/helpers/transfer.py | 44 + rowboat_agents/src/graph/instructions.py | 105 + rowboat_agents/src/graph/tool_calling.py | 148 + rowboat_agents/src/graph/tools.py | 46 + rowboat_agents/src/graph/tracing.py | 212 + rowboat_agents/src/graph/types.py | 26 + rowboat_agents/src/utils/__init__.py | 0 rowboat_agents/src/utils/client.py | 38 + rowboat_agents/src/utils/common.py | 210 + rowboat_agents/tests/__init__.py | 0 rowboat_agents/tests/app_client.py | 22 + rowboat_agents/tests/app_client_streaming.py | 137 + rowboat_agents/tests/interactive.py | 207 + .../sample_requests/default_example.json | 235 + .../tests/sample_requests/example1.json | 424 ++ .../tests/sample_requests/example2.json | 121 + .../tests/sample_requests/example3.json | 94 + .../tests/sample_requests/tmp1.json | 469 ++ .../tests/sample_requests/tmp2.json | 167 + .../tests/sample_requests/tmp3.json | 385 ++ .../tests/sample_requests/tmp4.json | 200 + .../sample_responses/default_example.json | 110 + tests/test_llm_transformation.py | 199 + tests/test_modal_handler.py | 408 ++ tests/test_tool_call_transformation.py | 409 ++ tests/test_tool_call_transformation.py.safe | 396 ++ 94 
files changed, 15914 insertions(+), 868 deletions(-) delete mode 100644 apps/rowboat/server.js create mode 100644 apps/rowboat_agents/src/utils/model_provider.py create mode 100644 custom_handler.py delete mode 100644 k8s/arm64/apply.sh create mode 100644 litellm/Dockerfile create mode 100644 litellm/README.md create mode 100755 litellm/build.sh create mode 100644 litellm/config.yaml create mode 100644 litellm/k8s/configmap.yaml create mode 100644 litellm/k8s/deployment.yaml create mode 100644 litellm/k8s/kustomization.yaml create mode 100644 litellm/k8s/secrets.yaml create mode 100644 litellm/k8s/service.yaml create mode 100644 litellm/litellm/__init__.py create mode 100644 litellm/litellm/custom_handler.py create mode 100644 litellm/requirements.txt create mode 100644 litellm/schema.prisma create mode 100644 litellm/setup.py create mode 100644 litellm/test_modal.py create mode 100644 litellm/test_modal_handler.py create mode 100644 rowboat_agents/.dockerignore create mode 100644 rowboat_agents/.env.example create mode 100644 rowboat_agents/.gitignore create mode 100644 rowboat_agents/Dockerfile create mode 100644 rowboat_agents/NOTICE.md create mode 100644 rowboat_agents/README.md create mode 100644 rowboat_agents/__init__.py create mode 100644 rowboat_agents/configs/default_config.json create mode 100644 rowboat_agents/poetry.lock create mode 100644 rowboat_agents/pyproject.toml create mode 100644 rowboat_agents/requirements.txt create mode 100644 rowboat_agents/src/__init__.py create mode 100644 rowboat_agents/src/app/__init__.py create mode 100644 rowboat_agents/src/app/main.py create mode 100644 rowboat_agents/src/graph/__init__.py create mode 100644 rowboat_agents/src/graph/core.py create mode 100644 rowboat_agents/src/graph/execute_turn.py create mode 100644 rowboat_agents/src/graph/guardrails.py create mode 100644 rowboat_agents/src/graph/helpers/access.py create mode 100644 rowboat_agents/src/graph/helpers/control.py create mode 100644 
rowboat_agents/src/graph/helpers/instructions.py create mode 100644 rowboat_agents/src/graph/helpers/library_tools.py create mode 100644 rowboat_agents/src/graph/helpers/state.py create mode 100644 rowboat_agents/src/graph/helpers/transfer.py create mode 100644 rowboat_agents/src/graph/instructions.py create mode 100644 rowboat_agents/src/graph/tool_calling.py create mode 100644 rowboat_agents/src/graph/tools.py create mode 100644 rowboat_agents/src/graph/tracing.py create mode 100644 rowboat_agents/src/graph/types.py create mode 100644 rowboat_agents/src/utils/__init__.py create mode 100644 rowboat_agents/src/utils/client.py create mode 100644 rowboat_agents/src/utils/common.py create mode 100644 rowboat_agents/tests/__init__.py create mode 100644 rowboat_agents/tests/app_client.py create mode 100644 rowboat_agents/tests/app_client_streaming.py create mode 100644 rowboat_agents/tests/interactive.py create mode 100644 rowboat_agents/tests/sample_requests/default_example.json create mode 100644 rowboat_agents/tests/sample_requests/example1.json create mode 100644 rowboat_agents/tests/sample_requests/example2.json create mode 100644 rowboat_agents/tests/sample_requests/example3.json create mode 100644 rowboat_agents/tests/sample_requests/tmp1.json create mode 100644 rowboat_agents/tests/sample_requests/tmp2.json create mode 100644 rowboat_agents/tests/sample_requests/tmp3.json create mode 100644 rowboat_agents/tests/sample_requests/tmp4.json create mode 100644 rowboat_agents/tests/sample_responses/default_example.json create mode 100644 tests/test_llm_transformation.py create mode 100644 tests/test_modal_handler.py create mode 100644 tests/test_tool_call_transformation.py create mode 100644 tests/test_tool_call_transformation.py.safe diff --git a/README.md b/README.md index c55358fcc..93780f764 100644 --- a/README.md +++ b/README.md @@ -317,3 +317,4 @@ To update API keys or other secrets: kubectl rollout restart deployment/copilot kubectl rollout restart 
deployment/rowboat ``` +# Test commit diff --git a/apps/copilot/client.py b/apps/copilot/client.py index d4dce2862..69fb98f2a 100644 --- a/apps/copilot/client.py +++ b/apps/copilot/client.py @@ -24,8 +24,8 @@ if PROVIDER_BASE_URL: print(f"Using provider {PROVIDER_BASE_URL}, for completions") completions_client = OpenAI( - base_url=PROVIDER_BASE_URL, - api_key=PROVIDER_API_KEY + base_url=PROVIDER_BASE_URL, + default_headers={"Authorization": f"Bearer {PROVIDER_API_KEY}"} ) else: print(f"Using OpenAI directly for completions") diff --git a/apps/docs/docs/using_rag.md b/apps/docs/docs/using_rag.md index a60946520..52deefadf 100644 --- a/apps/docs/docs/using_rag.md +++ b/apps/docs/docs/using_rag.md @@ -1,12 +1,12 @@ # Using RAG in Rowboat -Rowboat provides multiple ways to enhance your agents with Retrieval-Augmented Generation (RAG). This guide will help you set up and use each RAG feature. +Rowboat provides multiple ways to enhance your agents' context with Retrieval-Augmented Generation (RAG). This guide will help you set up and use each RAG features. ## Quick Start Text RAG and local file uploads are enabled by default - no configuration needed! Just start using them right away. -## Available RAG Features +## RAG Features ### 1. Text RAG āœ… Enabled by default: @@ -21,8 +21,28 @@ Text RAG and local file uploads are enabled by default - no configuration needed - Files are stored locally - No configuration required - Files are parsed using OpenAI by default +- For larger files, we recommend using Gemini models - see section below. -### 3. S3 File Uploads +#### 2.1 Using Gemini for File Parsing +To use Google's Gemini model for parsing uploaded PDFs, set the following variable: + +```bash +# Enable Gemini for file parsing +export USE_GEMINI_FILE_PARSING=true +export GOOGLE_API_KEY=your_google_api_key +``` + +### 3. URL Scraping +Rowboat uses Firecrawl for URL scraping. 
To enable URL scraping, set the following variables: + +```bash +export USE_RAG_SCRAPING=true +export FIRECRAWL_API_KEY=your_firecrawl_api_key +``` + +## Advanced RAG features + +### 1. File Uploads Backed by S3 To enable S3 file uploads, set the following variables: ```bash @@ -36,20 +56,8 @@ export RAG_UPLOADS_S3_BUCKET=your_bucket_name export RAG_UPLOADS_S3_REGION=your_region ``` -### 4. URL Scraping -To enable URL scraping, set the following variables: +### 2. Changing Default Parsing Model -```bash -# Enable URL scraping -export USE_RAG_SCRAPING=true - -# Firecrawl API key for web scraping -export FIRECRAWL_API_KEY=your_firecrawl_api_key -``` - -## File Parsing Options - -### Default Parsing (OpenAI) By default, uploaded PDF files are parsed using `gpt-4o`. You can customize this by setting the following: ```bash @@ -64,16 +72,7 @@ export FILE_PARSING_PROVIDER_BASE_URL=your-provider-base-url export FILE_PARSING_PROVIDER_API_KEY=your-provider-api-key ``` -### Using Gemini for File Parsing -To use Google's Gemini model for parsing uploaded PDFs, set the following variable: - -```bash -# Enable Gemini for file parsing -export USE_GEMINI_FILE_PARSING=true -export GOOGLE_API_KEY=your_google_api_key -``` - -## Embedding Model options +### 3. Embedding Model Options By default, Rowboat uses OpenAI's `text-embedding-3-small` model for generating embeddings. You can customize this by setting the following: @@ -101,4 +100,4 @@ export EMBEDDING_PROVIDER_BASE_URL=your-provider-base-url export EMBEDDING_PROVIDER_API_KEY=your-provider-api-key ``` -If you don't specify the provider settings, Rowboat will use OpenAI as the default provider. \ No newline at end of file +If you don't specify the provider settings, Rowboat will use OpenAI as the default provider. 
diff --git a/apps/rowboat/Dockerfile b/apps/rowboat/Dockerfile index a22719a14..a6737c9b8 100644 --- a/apps/rowboat/Dockerfile +++ b/apps/rowboat/Dockerfile @@ -1,37 +1,67 @@ # syntax=docker.io/docker/dockerfile:1 -FROM node:20-alpine AS builder +FROM node:18-alpine AS base +# Install dependencies only when needed +FROM base AS deps +# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed. +RUN apk add --no-cache libc6-compat WORKDIR /app -# Copy package files -COPY package.json package-lock.json ./ +# Install dependencies based on the preferred package manager +COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* .npmrc* ./ +RUN \ + if [ -f yarn.lock ]; then yarn --frozen-lockfile; \ + elif [ -f package-lock.json ]; then npm ci; \ + elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm i --frozen-lockfile; \ + else echo "Lockfile not found." && exit 1; \ + fi -# Install dependencies -RUN npm install -# Copy source code +# Rebuild the source code only when needed +FROM base AS builder +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules COPY . . -# Build the application -RUN npm run build +# Next.js collects completely anonymous telemetry data about general usage. +# Learn more here: https://nextjs.org/telemetry +# Uncomment the following line in case you want to disable telemetry during the build. +# ENV NEXT_TELEMETRY_DISABLED=1 -# Production image -FROM node:20-alpine AS runner +RUN \ + if [ -f yarn.lock ]; then yarn run build; \ + elif [ -f package-lock.json ]; then npm run build; \ + elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm run build; \ + else echo "Lockfile not found." 
&& exit 1; \ + fi +# Production image, copy all the files and run next +FROM base AS runner WORKDIR /app -# Copy necessary files from builder -COPY --from=builder /app/.next/standalone ./ -COPY --from=builder /app/.next/static ./.next/static +ENV NODE_ENV=production +# Uncomment the following line in case you want to disable telemetry during runtime. +# ENV NEXT_TELEMETRY_DISABLED=1 + +RUN addgroup --system --gid 1001 nodejs +RUN adduser --system --uid 1001 nextjs + COPY --from=builder /app/public ./public -# Set environment variables -ENV NODE_ENV=production -ENV PORT=3000 +# Automatically leverage output traces to reduce image size +# https://nextjs.org/docs/advanced-features/output-file-tracing +COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ +COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static + +USER nextjs -# Expose port EXPOSE 3000 -# Start the server using the standalone output +ENV PORT=3000 + +# server.js is created by next build from the standalone output +# https://nextjs.org/docs/pages/api-reference/config/next-config-js/output +ENV HOSTNAME="0.0.0.0" +ENV PORT=3000 CMD ["node", "server.js"] \ No newline at end of file diff --git a/apps/rowboat/app/projects/[projectId]/config/app.tsx b/apps/rowboat/app/projects/[projectId]/config/app.tsx index bfff53929..1aa687272 100644 --- a/apps/rowboat/app/projects/[projectId]/config/app.tsx +++ b/apps/rowboat/app/projects/[projectId]/config/app.tsx @@ -305,7 +305,7 @@ function McpServersSection({ /> { setNewServer({ ...newServer, url: e.target.value }); diff --git a/apps/rowboat/app/projects/[projectId]/config/components/tools.tsx b/apps/rowboat/app/projects/[projectId]/config/components/tools.tsx index 36c7112f7..9b66b9479 100644 --- a/apps/rowboat/app/projects/[projectId]/config/components/tools.tsx +++ b/apps/rowboat/app/projects/[projectId]/config/components/tools.tsx @@ -210,7 +210,7 @@ function McpServersSection({ projectId }: { projectId: string }) {
{ setNewServer({ ...newServer, url: e.target.value }); diff --git a/apps/rowboat/next.config.mjs b/apps/rowboat/next.config.mjs index 0882ba327..4e18a964f 100644 --- a/apps/rowboat/next.config.mjs +++ b/apps/rowboat/next.config.mjs @@ -1,47 +1,5 @@ /** @type {import('next').NextConfig} */ const nextConfig = { - // Disable some optimizations to reduce memory usage during build - swcMinify: true, // Enable minification - optimizeFonts: true, - productionBrowserSourceMaps: false, - - // Disable unnecessary features - images: { - unoptimized: true, // Disable image optimization - }, - - // Reduce output size - compress: true, - poweredByHeader: false, - - // App Router specific configurations - experimental: { - // Remove serverActions as it's now enabled by default - }, - - // Custom webpack configuration - webpack: (config, { isServer }) => { - // Add any custom webpack configurations here - return config; - }, - - // Ensure catch-all routes are handled correctly - pageExtensions: ['js', 'jsx', 'ts', 'tsx'], - - // Disable the default catch-all route handling - async rewrites() { - return [ - { - source: '/projects/:projectId/test/:appType*', - destination: '/projects/:projectId/test/:appType', - }, - { - source: '/projects/:projectId/workflow', - destination: '/projects/:projectId/workflow', - }, - ] - }, - output: 'standalone', }; diff --git a/apps/rowboat/package-lock.json b/apps/rowboat/package-lock.json index 159c7e3d4..f04383916 100644 --- a/apps/rowboat/package-lock.json +++ b/apps/rowboat/package-lock.json @@ -30,7 +30,6 @@ "clsx": "^2.1.1", "date-fns": "^4.1.0", "dotenv": "^16.4.5", - "express": "^4.18.3", "framer-motion": "^11.5.4", "fuse.js": "^7.1.0", "immer": "^10.1.1", @@ -9997,245 +9996,6 @@ "node": ">=18" } }, - "node_modules/@modelcontextprotocol/sdk/node_modules/accepts": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", - "integrity": 
"sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", - "license": "MIT", - "dependencies": { - "mime-types": "^3.0.0", - "negotiator": "^1.0.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/body-parser": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz", - "integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==", - "license": "MIT", - "dependencies": { - "bytes": "^3.1.2", - "content-type": "^1.0.5", - "debug": "^4.4.0", - "http-errors": "^2.0.0", - "iconv-lite": "^0.6.3", - "on-finished": "^2.4.1", - "qs": "^6.14.0", - "raw-body": "^3.0.0", - "type-is": "^2.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/content-disposition": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", - "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", - "license": "MIT", - "dependencies": { - "safe-buffer": "5.2.1" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/cookie": { - "version": "0.7.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", - "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/cookie-signature": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", - "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", - "license": "MIT", - "engines": { - "node": ">=6.6.0" - } - }, - 
"node_modules/@modelcontextprotocol/sdk/node_modules/express": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/express/-/express-5.1.0.tgz", - "integrity": "sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==", - "license": "MIT", - "dependencies": { - "accepts": "^2.0.0", - "body-parser": "^2.2.0", - "content-disposition": "^1.0.0", - "content-type": "^1.0.5", - "cookie": "^0.7.1", - "cookie-signature": "^1.2.1", - "debug": "^4.4.0", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "etag": "^1.8.1", - "finalhandler": "^2.1.0", - "fresh": "^2.0.0", - "http-errors": "^2.0.0", - "merge-descriptors": "^2.0.0", - "mime-types": "^3.0.0", - "on-finished": "^2.4.1", - "once": "^1.4.0", - "parseurl": "^1.3.3", - "proxy-addr": "^2.0.7", - "qs": "^6.14.0", - "range-parser": "^1.2.1", - "router": "^2.2.0", - "send": "^1.1.0", - "serve-static": "^2.2.0", - "statuses": "^2.0.1", - "type-is": "^2.0.1", - "vary": "^1.1.2" - }, - "engines": { - "node": ">= 18" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/finalhandler": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", - "integrity": "sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", - "license": "MIT", - "dependencies": { - "debug": "^4.4.0", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "on-finished": "^2.4.1", - "parseurl": "^1.3.3", - "statuses": "^2.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/fresh": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", - "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - 
"node_modules/@modelcontextprotocol/sdk/node_modules/media-typer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", - "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/merge-descriptors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", - "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/mime-db": { - "version": "1.54.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", - "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/mime-types": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", - "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", - "license": "MIT", - "dependencies": { - "mime-db": "^1.54.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "license": "MIT" - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/negotiator": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", - "integrity": 
"sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/send": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz", - "integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==", - "license": "MIT", - "dependencies": { - "debug": "^4.3.5", - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "etag": "^1.8.1", - "fresh": "^2.0.0", - "http-errors": "^2.0.0", - "mime-types": "^3.0.1", - "ms": "^2.1.3", - "on-finished": "^2.4.1", - "range-parser": "^1.2.1", - "statuses": "^2.0.1" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/serve-static": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.2.0.tgz", - "integrity": "sha512-61g9pCh0Vnh7IutZjtLGGpTA355+OPn2TyDv/6ivP2h/AdAVX9azsoxmg2/M6nZeQZNYBEwIcsne1mJd9oQItQ==", - "license": "MIT", - "dependencies": { - "encodeurl": "^2.0.0", - "escape-html": "^1.0.3", - "parseurl": "^1.3.3", - "send": "^1.2.0" - }, - "engines": { - "node": ">= 18" - } - }, - "node_modules/@modelcontextprotocol/sdk/node_modules/type-is": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", - "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==", - "license": "MIT", - "dependencies": { - "content-type": "^1.0.5", - "media-typer": "^1.1.0", - "mime-types": "^3.0.0" - }, - "engines": { - "node": ">= 0.6" - } - }, "node_modules/@mongodb-js/saslprep": { "version": "1.1.8", "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.1.8.tgz", @@ -13775,13 +13535,31 @@ } }, "node_modules/accepts": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - 
"integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", - "license": "MIT", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-2.0.0.tgz", + "integrity": "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==", + "dependencies": { + "mime-types": "^3.0.0", + "negotiator": "^1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/accepts/node_modules/mime-db": { + "version": "1.53.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.53.0.tgz", + "integrity": "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/accepts/node_modules/mime-types": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.0.tgz", + "integrity": "sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w==", "dependencies": { - "mime-types": "~2.1.34", - "negotiator": "0.6.3" + "mime-db": "^1.53.0" }, "engines": { "node": ">= 0.6" @@ -13946,12 +13724,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array-flatten": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", - "license": "MIT" - }, "node_modules/array-includes": { "version": "3.1.8", "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", @@ -14243,43 +14015,28 @@ } }, "node_modules/body-parser": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", - "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", - "license": "MIT", + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/body-parser/-/body-parser-2.1.0.tgz", + "integrity": "sha512-/hPxh61E+ll0Ujp24Ilm64cykicul1ypfwjVttduAiEdtnJFvLePSrIPk+HMImtNv5270wOGCb1Tns2rybMkoQ==", "dependencies": { - "bytes": "3.1.2", - "content-type": "~1.0.5", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", - "type-is": "~1.6.18", - "unpipe": "1.0.0" + "bytes": "^3.1.2", + "content-type": "^1.0.5", + "debug": "^4.4.0", + "http-errors": "^2.0.0", + "iconv-lite": "^0.5.2", + "on-finished": "^2.4.1", + "qs": "^6.14.0", + "raw-body": "^3.0.0", + "type-is": "^2.0.0" }, "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/body-parser/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" + "node": ">=18" } }, "node_modules/body-parser/node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "license": "MIT", + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.5.2.tgz", + "integrity": "sha512-kERHXvpSaB4aU3eANwidg79K8FlrN77m8G9V+0vOR3HYaRifrlwMEpT7ZBJqLSEIHnEgJTHcWK82wwLwwKwtag==", "dependencies": { "safer-buffer": ">= 2.1.2 < 3" }, @@ -14287,42 +14044,6 @@ "node": ">=0.10.0" } }, - "node_modules/body-parser/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - 
"node_modules/body-parser/node_modules/qs": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", - "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.0.6" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/body-parser/node_modules/raw-body": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", - "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", @@ -14762,10 +14483,9 @@ "dev": true }, "node_modules/content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", - "license": "MIT", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-1.0.0.tgz", + "integrity": "sha512-Au9nRL8VNUut/XSzbQA38+M78dzP4D+eqg3gfJHMIHHYa3bg067xj1KxMUWj+VULbiZMowKngFFbKczUrNJ1mg==", "dependencies": { "safe-buffer": "5.2.1" }, @@ -14796,10 +14516,12 @@ } }, "node_modules/cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==", - "license": "MIT" + "version": "1.2.2", + "resolved": 
"https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.2.2.tgz", + "integrity": "sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg==", + "engines": { + "node": ">=6.6.0" + } }, "node_modules/cors": { "version": "2.8.5", @@ -15130,7 +14852,6 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", - "license": "MIT", "engines": { "node": ">= 0.8", "npm": "1.2.8000 || >= 1.4.16" @@ -15294,8 +15015,7 @@ "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", - "license": "MIT" + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" }, "node_modules/electron-to-chromium": { "version": "1.5.74", @@ -15312,7 +15032,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", - "license": "MIT", "engines": { "node": ">= 0.8" } @@ -15589,8 +15308,7 @@ "node_modules/escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", - "license": "MIT" + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" }, "node_modules/escape-string-regexp": { "version": "4.0.0", @@ -16029,7 +15747,6 @@ "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", - 
"license": "MIT", "engines": { "node": ">= 0.6" } @@ -16067,49 +15784,45 @@ } }, "node_modules/express": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", - "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", - "license": "MIT", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/express/-/express-5.0.1.tgz", + "integrity": "sha512-ORF7g6qGnD+YtUG9yx4DFoqCShNMmUKiXuT5oWMHiOvt/4WFbHC6yCwQMTSBMno7AqntNCAzzcnnjowRkTL9eQ==", "dependencies": { - "accepts": "~1.3.8", - "array-flatten": "1.1.1", - "body-parser": "1.20.3", - "content-disposition": "0.5.4", + "accepts": "^2.0.0", + "body-parser": "^2.0.1", + "content-disposition": "^1.0.0", "content-type": "~1.0.4", "cookie": "0.7.1", - "cookie-signature": "1.0.6", - "debug": "2.6.9", + "cookie-signature": "^1.2.1", + "debug": "4.3.6", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", - "finalhandler": "1.3.1", - "fresh": "0.5.2", + "finalhandler": "^2.0.0", + "fresh": "2.0.0", "http-errors": "2.0.0", - "merge-descriptors": "1.0.3", + "merge-descriptors": "^2.0.0", "methods": "~1.1.2", + "mime-types": "^3.0.0", "on-finished": "2.4.1", + "once": "1.4.0", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", + "router": "^2.0.0", "safe-buffer": "5.2.1", - "send": "0.19.0", - "serve-static": "1.16.2", + "send": "^1.1.0", + "serve-static": "^2.1.0", "setprototypeof": "1.2.0", "statuses": "2.0.1", - "type-is": "~1.6.18", + "type-is": "^2.0.0", "utils-merge": "1.0.1", "vary": "~1.1.2" }, "engines": { - "node": ">= 0.10.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" + "node": ">= 18" } }, "node_modules/express-rate-limit": { @@ -16135,19 +15848,39 @@ } }, "node_modules/express/node_modules/debug": { - "version": "2.6.9", - "resolved": 
"https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dependencies": { - "ms": "2.0.0" + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/express/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" + "node_modules/express/node_modules/mime-db": { + "version": "1.53.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.53.0.tgz", + "integrity": "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express/node_modules/mime-types": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.0.tgz", + "integrity": "sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w==", + "dependencies": { + "mime-db": "^1.53.0" + }, + "engines": { + "node": ">= 0.6" + } }, "node_modules/express/node_modules/qs": { "version": "6.13.0", @@ -16281,38 +16014,21 @@ } }, "node_modules/finalhandler": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", - "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", - "license": "MIT", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.0.tgz", + "integrity": 
"sha512-/t88Ty3d5JWQbWYgaOGCCYfXRwV1+be02WqYYlL6h0lEiUAMPM8o8qKGO01YIkOHzka2up08wvgYD0mDiI+q3Q==", "dependencies": { - "debug": "2.6.9", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "statuses": "2.0.1", - "unpipe": "~1.0.0" + "debug": "^4.4.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "on-finished": "^2.4.1", + "parseurl": "^1.3.3", + "statuses": "^2.0.1" }, "engines": { "node": ">= 0.8" } }, - "node_modules/finalhandler/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/finalhandler/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, "node_modules/find-root": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz", @@ -16475,12 +16191,11 @@ } }, "node_modules/fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", - "license": "MIT", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-2.0.0.tgz", + "integrity": "sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==", "engines": { - "node": ">= 0.6" + "node": ">= 0.8" } }, "node_modules/fs.realpath": { @@ -17403,8 +17118,7 @@ "node_modules/is-promise": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", - "integrity": 
"sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==", - "license": "MIT" + "integrity": "sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==" }, "node_modules/is-regex": { "version": "1.1.4", @@ -18347,12 +18061,11 @@ } }, "node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", - "license": "MIT", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", + "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==", "engines": { - "node": ">= 0.6" + "node": ">= 0.8" } }, "node_modules/memoize-one": { @@ -18366,10 +18079,12 @@ "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" }, "node_modules/merge-descriptors": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", - "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", - "license": "MIT", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-2.0.0.tgz", + "integrity": "sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g==", + "engines": { + "node": ">=18" + }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } @@ -18937,18 +18652,6 @@ "node": ">=8.6" } }, - "node_modules/mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - 
}, "node_modules/mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -19119,10 +18822,9 @@ "dev": true }, "node_modules/negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "license": "MIT", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", + "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==", "engines": { "node": ">= 0.6" } @@ -19437,7 +19139,6 @@ "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "license": "MIT", "dependencies": { "ee-first": "1.1.1" }, @@ -19720,7 +19421,6 @@ "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", - "license": "MIT", "engines": { "node": ">= 0.8" } @@ -19772,10 +19472,12 @@ } }, "node_modules/path-to-regexp": { - "version": "0.1.12", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", - "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==", - "license": "MIT" + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", + "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", + "engines": { + "node": ">=16" + } }, "node_modules/path-type": { "version": "4.0.0", @@ -20118,7 +19820,6 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", "integrity": 
"sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "license": "MIT", "engines": { "node": ">= 0.6" } @@ -20493,13 +20194,10 @@ } }, "node_modules/router": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz", - "integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==", - "license": "MIT", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/router/-/router-2.1.0.tgz", + "integrity": "sha512-/m/NSLxeYEgWNtyC+WtNHCF7jbGxOibVWKnn+1Psff4dJGOfoXP+MuC/f2CwSmyiHdOIzYnYFp4W6GxWfekaLA==", "dependencies": { - "debug": "^4.4.0", - "depd": "^2.0.0", "is-promise": "^4.0.0", "parseurl": "^1.3.3", "path-to-regexp": "^8.0.0" @@ -20508,15 +20206,6 @@ "node": ">= 18" } }, - "node_modules/router/node_modules/path-to-regexp": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.2.0.tgz", - "integrity": "sha512-TdrF7fW9Rphjq4RjrW0Kp2AW0Ahwu9sRGTkS6bvDi0SCwZlEZYmcfDbEsTz8RVk0EHIS/Vd1bv3JhG+1xZuAyQ==", - "license": "MIT", - "engines": { - "node": ">=16" - } - }, "node_modules/rowboat-shared": { "version": "1.0.0", "resolved": "git+ssh://git@github.com/rowboatlabs/shared.git#1c8e722b8c5d644672a2db92f4ba5f25d8560352", @@ -20654,72 +20343,52 @@ } }, "node_modules/send": { - "version": "0.19.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", - "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", - "license": "MIT", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/send/-/send-1.1.0.tgz", + "integrity": "sha512-v67WcEouB5GxbTWL/4NeToqcZiAWEq90N888fczVArY8A79J0L4FD7vj5hm3eUMua5EpoQ59wa/oovY6TLvRUA==", "dependencies": { - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "mime": 
"1.6.0", - "ms": "2.1.3", - "on-finished": "2.4.1", - "range-parser": "~1.2.1", - "statuses": "2.0.1" + "debug": "^4.3.5", + "destroy": "^1.2.0", + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "etag": "^1.8.1", + "fresh": "^0.5.2", + "http-errors": "^2.0.0", + "mime-types": "^2.1.35", + "ms": "^2.1.3", + "on-finished": "^2.4.1", + "range-parser": "^1.2.1", + "statuses": "^2.0.1" }, "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/send/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" + "node": ">= 18" } }, - "node_modules/send/node_modules/debug/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - "node_modules/send/node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", - "license": "MIT", + "node_modules/send/node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", "engines": { - "node": ">= 0.8" + "node": ">= 0.6" } }, "node_modules/send/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "license": "MIT" + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, 
"node_modules/serve-static": { - "version": "1.16.2", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", - "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", - "license": "MIT", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-2.1.0.tgz", + "integrity": "sha512-A3We5UfEjG8Z7VkDv6uItWw6HY2bBSBJT1KtVESn6EOoOr2jAxNhxWCLY3jDE2WcuHXByWju74ck3ZgLwL8xmA==", "dependencies": { - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.19.0" + "encodeurl": "^2.0.0", + "escape-html": "^1.0.3", + "parseurl": "^1.3.3", + "send": "^1.0.0" }, "engines": { - "node": ">= 0.8.0" + "node": ">= 18" } }, "node_modules/set-function-length": { @@ -21607,13 +21276,32 @@ } }, "node_modules/type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "license": "MIT", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.0.tgz", + "integrity": "sha512-gd0sGezQYCbWSbkZr75mln4YBidWUN60+devscpLF5mtRDUpiaTvKpBNrdaCvel1NdR2k6vclXybU5fBd2i+nw==", + "dependencies": { + "content-type": "^1.0.5", + "media-typer": "^1.1.0", + "mime-types": "^3.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/type-is/node_modules/mime-db": { + "version": "1.53.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.53.0.tgz", + "integrity": "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/type-is/node_modules/mime-types": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.0.tgz", + "integrity": 
"sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w==", "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" + "mime-db": "^1.53.0" }, "engines": { "node": ">= 0.6" diff --git a/apps/rowboat/package.json b/apps/rowboat/package.json index f1c431bdc..77e08c9e5 100644 --- a/apps/rowboat/package.json +++ b/apps/rowboat/package.json @@ -37,7 +37,6 @@ "clsx": "^2.1.1", "date-fns": "^4.1.0", "dotenv": "^16.4.5", - "express": "^4.18.3", "framer-motion": "^11.5.4", "fuse.js": "^7.1.0", "immer": "^10.1.1", diff --git a/apps/rowboat/server.js b/apps/rowboat/server.js deleted file mode 100644 index c396b41a7..000000000 --- a/apps/rowboat/server.js +++ /dev/null @@ -1,67 +0,0 @@ -import express from 'express' -import next from 'next' -import { MongoClient } from 'mongodb' -import { createServer } from 'http' - -// Create Express server -const dev = process.env.NODE_ENV !== 'production' -const app = next({ dev }) -const handle = app.getRequestHandler() -const port = parseInt(process.env.PORT || '3000', 10) -const hostname = '0.0.0.0' - -console.log(`Starting server with configuration: -- NODE_ENV: ${process.env.NODE_ENV} -- PORT: ${port} -- HOSTNAME: ${hostname} -- MONGODB_CONNECTION_STRING: ${process.env.MONGODB_CONNECTION_STRING ? 
'set' : 'not set'} -`) - -// Prepare the server -app.prepare().then(async () => { - try { - const server = express() - const mongoClient = new MongoClient(process.env.MONGODB_CONNECTION_STRING) - - try { - await mongoClient.connect() - console.log('Connected to MongoDB') - } catch (error) { - console.error('MongoDB connection error:', error) - // Don't throw here, we can still start the server - } - - // Let Next.js handle all routes - server.all('*', (req, res) => { - return handle(req, res) - }) - - const httpServer = createServer(server) - - // Add error handling for the server - httpServer.on('error', (error) => { - console.error('Server error:', error) - if (error.code === 'EADDRINUSE') { - console.error(`Port ${port} is already in use`) - process.exit(1) - } - }) - - // Add connection handling - httpServer.on('connection', (socket) => { - console.log('New connection from:', socket.remoteAddress) - }) - - // Start the server - httpServer.listen(port, hostname, () => { - console.log(`> Server is ready and listening on http://${hostname}:${port}`) - }) - - } catch (error) { - console.error('Failed to start server:', error) - process.exit(1) - } -}).catch((error) => { - console.error('Failed to prepare Next.js app:', error) - process.exit(1) -}) \ No newline at end of file diff --git a/apps/rowboat_agents/Dockerfile b/apps/rowboat_agents/Dockerfile index 019c6c2d3..c23a3f859 100644 --- a/apps/rowboat_agents/Dockerfile +++ b/apps/rowboat_agents/Dockerfile @@ -4,21 +4,35 @@ FROM python:3.12-slim # Set working directory in container WORKDIR /app -# Install poetry -RUN pip install poetry +# Install system dependencies +RUN apt-get update && apt-get install -y \ + build-essential \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# Install poetry with specific version +RUN curl -sSL https://install.python-poetry.org | python3 - --version 1.7.1 + +# Add poetry to PATH +ENV PATH="/root/.local/bin:$PATH" # Copy poetry files COPY pyproject.toml poetry.lock ./ -# Configure 
poetry to not create virtual environment in container -RUN poetry config virtualenvs.create false +# Configure poetry +RUN poetry config virtualenvs.create false \ + && poetry config installer.max-workers 10 -# Install dependencies -RUN poetry install --no-interaction --no-ansi +# Regenerate lock file and install dependencies +RUN poetry lock --no-update \ + && poetry install --no-interaction --no-ansi --no-root -v # Copy project files COPY . . +# Install the project with verbose output +RUN poetry install --no-interaction --no-ansi -v + # Set environment variables ENV QUART_APP=src.app.main ENV PYTHONUNBUFFERED=1 diff --git a/apps/rowboat_agents/pyproject.toml b/apps/rowboat_agents/pyproject.toml index 11edf8beb..d0bcef4c2 100644 --- a/apps/rowboat_agents/pyproject.toml +++ b/apps/rowboat_agents/pyproject.toml @@ -6,7 +6,7 @@ authors = ["Akhilesh "] license = "MIT" readme = "README.md" homepage = "https://github.com/rowboatlabs/agents" -package-mode = false +packages = [] [tool.poetry.dependencies] python = ">=3.10,<4.0" @@ -20,7 +20,7 @@ beautifulsoup4 = "^4.12.3" blinker = "^1.9.0" build = "1.2.2.post1" CacheControl = "^0.14.2" -certifi = "^2024.12.14" +certifi = ">=2023.7.22,<2024.0.0" cffi = "^1.17.1" charset-normalizer = "^3.4.1" cleo = "^2.1.0" @@ -51,6 +51,7 @@ Jinja2 = "^3.1.5" jiter = "^0.6.1" jsonpath-python = "^1.0.6" keyring = "^25.6.0" +litellm = "1.0.0" lxml = "^5.3.0" markdownify = "^0.13.1" MarkupSafe = "^3.0.2" diff --git a/apps/rowboat_agents/requirements.txt b/apps/rowboat_agents/requirements.txt index 29063f1e9..45bfcab59 100644 --- a/apps/rowboat_agents/requirements.txt +++ b/apps/rowboat_agents/requirements.txt @@ -58,7 +58,7 @@ jsonpath-python==1.0.6 jsonschema==4.23.0 jsonschema-specifications==2025.4.1 keyring==25.6.0 -litellm==1.67.2 +litellm>=1.0.0 lxml==5.3.0 Markdown==3.7 markdownify==0.13.1 @@ -77,7 +77,7 @@ multidict==6.2.0 mypy-extensions==1.0.0 nest-asyncio==1.6.0 numpy==2.2.1 -openai==1.76.0 +openai>=1.0.0 openai-agents==0.0.13 
openpyxl==3.1.5 packaging==24.2 @@ -104,7 +104,7 @@ pymongo==4.10.1 pyproject_hooks==1.2.0 python-dateutil==2.9.0.post0 python-docx==1.1.2 -python-dotenv==1.0.1 +python-dotenv>=1.0.0 pytz==2024.2 PyYAML==6.0.2 pyyaml_env_tag==0.1 diff --git a/apps/rowboat_agents/src/graph/core.py b/apps/rowboat_agents/src/graph/core.py index e59079a62..6e94f0571 100644 --- a/apps/rowboat_agents/src/graph/core.py +++ b/apps/rowboat_agents/src/graph/core.py @@ -153,8 +153,8 @@ async def run_turn_streamed( # Initialize agents and get external tools new_agents = get_agents( - agent_configs=agent_configs, - tool_configs=tool_configs, + agent_configs=agent_configs, + tool_configs=tool_configs, complete_request=complete_request ) new_agents = add_child_transfer_related_instructions_to_agents(new_agents) @@ -196,12 +196,15 @@ async def run_turn_streamed( if event.type == "raw_response_event": # Handle token usage counting if hasattr(event.data, 'type') and event.data.type == "response.completed" and hasattr(event.data.response, 'usage'): - tokens_used["total"] += event.data.response.usage.total_tokens - tokens_used["prompt"] += event.data.response.usage.input_tokens - tokens_used["completion"] += event.data.response.usage.output_tokens - print('-'*50) - print(f"Found usage information. Updated cumulative tokens: {tokens_used}") - print('-'*50) + try: + tokens_used["total"] += event.data.response.usage.total_tokens + tokens_used["prompt"] += event.data.response.usage.input_tokens + tokens_used["completion"] += event.data.response.usage.output_tokens + print('-'*50) + print(f"Found usage information. 
Updated cumulative tokens: {tokens_used}") + print('-'*50) + except Exception as e: + print(f"Warning: Tokens used is likely not available for your chosen model: {e}") web_search_messages = handle_web_search_event(event, current_agent) for message in web_search_messages: diff --git a/apps/rowboat_agents/src/graph/execute_turn.py b/apps/rowboat_agents/src/graph/execute_turn.py index 353078ced..2238db67c 100644 --- a/apps/rowboat_agents/src/graph/execute_turn.py +++ b/apps/rowboat_agents/src/graph/execute_turn.py @@ -96,54 +96,104 @@ async def call_webhook(tool_name: str, args: str, webhook_url: str, signing_secr async def call_mcp(tool_name: str, args: str, mcp_server_url: str) -> str: try: - print(f"MCP tool called for: {tool_name}") + print(f"\n=== MCP Tool Call Details ===") + print(f"Tool Name: {tool_name}") + print(f"Arguments: {args}") + print(f"MCP Server URL: {mcp_server_url}") + async with sse_client(url=mcp_server_url) as streams: + print("āœ“ SSE client created") async with ClientSession(*streams) as session: + print("āœ“ Client session created") await session.initialize() + print("āœ“ Session initialized") jargs = json.loads(args) + print(f"Calling MCP tool with args: {json.dumps(jargs, indent=2)}") response = await session.call_tool(tool_name, arguments=jargs) + print(f"āœ“ MCP response received: {response}") json_output = json.dumps([item.__dict__ for item in response.content], indent=2) - - return json_output + print(f"Formatted response: {json_output}") + return json_output except Exception as e: - print(f"Error in call_mcp: {str(e)}") + print(f"ERROR in call_mcp: {str(e)}") + print(f"Stack trace:", exc_info=True) return f"Error: {str(e)}" async def catch_all(ctx: RunContextWrapper[Any], args: str, tool_name: str, tool_config: dict, complete_request: dict) -> str: try: - print(f"Catch all called for tool: {tool_name}") - print(f"Args: {args}") - print(f"Tool config: {tool_config}") + print(f"\n=== Catch All Function Called ===") + print(f"Tool Name: 
{tool_name}") + print(f"Raw Args: {args}") + print(f"Tool Config: {json.dumps(tool_config, indent=2)}") + print(f"Complete Request: {json.dumps(complete_request, indent=2)}") # Create event loop for async operations try: loop = asyncio.get_event_loop() + print("āœ“ Event loop obtained") except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) + print("āœ“ New event loop created") response_content = None if tool_config.get("mockTool", False) or complete_request.get("testProfile", {}).get("mockTools", False): - # Call mock_tool to handle the response (it will decide whether to use mock instructions or generate a response) + print("Using mock tool") + # Call mock_tool to handle the response if complete_request.get("testProfile", {}).get("mockPrompt", ""): response_content = await mock_tool(tool_name, args, tool_config.get("description", ""), complete_request.get("testProfile", {}).get("mockPrompt", "")) else: response_content = await mock_tool(tool_name, args, tool_config.get("description", ""), tool_config.get("mockInstructions", "")) - print(response_content) + print(f"Mock tool response: {response_content}") elif tool_config.get("isMcp", False): + print("\n=== MCP Tool Processing ===") mcp_server_name = tool_config.get("mcpServerName", "") - mcp_servers = complete_request.get("mcpServers", {}) + mcp_servers = complete_request.get("mcpServers", []) + print(f"MCP Server Name: {mcp_server_name}") + print(f"Available MCP Servers: {json.dumps(mcp_servers, indent=2)}") mcp_server_url = next((server.get("url", "") for server in mcp_servers if server.get("name") == mcp_server_name), "") - response_content = await call_mcp(tool_name, args, mcp_server_url) + print(f"Selected MCP Server URL: {mcp_server_url}") + + if not mcp_server_url: + print("āŒ ERROR: MCP server URL not found") + return f"Error: MCP server {mcp_server_name} not found" + + # Parse the arguments + try: + print("\n=== Parsing Arguments ===") + if isinstance(args, str): + 
print(f"Args is string, attempting to parse JSON") + jargs = json.loads(args) + else: + print(f"Args is already a dict/object") + jargs = args + print(f"Successfully parsed arguments: {json.dumps(jargs, indent=2)}") + + # Call the MCP tool + print("\n=== Calling MCP Tool ===") + response_content = await call_mcp(tool_name, json.dumps(jargs), mcp_server_url) + print(f"āœ“ MCP tool response received: {response_content}") + except json.JSONDecodeError as e: + print(f"āŒ Error parsing arguments: {e}") + print(f"Raw arguments: {args}") + return f"Error: Invalid arguments format - {str(e)}" else: + print("\n=== Webhook Processing ===") collection = db["projects"] doc = collection.find_one({"_id": complete_request.get("projectId", "")}) signing_secret = doc.get("secret", "") webhook_url = complete_request.get("toolWebhookUrl", "") + print(f"Webhook URL: {webhook_url}") + print(f"Signing Secret: {'*' * len(signing_secret) if signing_secret else 'None'}") response_content = await call_webhook(tool_name, args, webhook_url, signing_secret) + print(f"āœ“ Webhook response: {response_content}") + + print(f"\n=== Final Response ===") + print(f"Response Content: {response_content}") return response_content except Exception as e: - print(f"Error in catch_all: {str(e)}") + print(f"āŒ Error in catch_all: {str(e)}") + print(f"Stack trace:", exc_info=True) return f"Error: {str(e)}" @@ -193,8 +243,9 @@ def get_agents(agent_configs, tool_configs, complete_request): new_agent_name_to_index = {} # Create Agent objects from config for agent_config in agent_configs: - print("="*100) - print(f"Processing config for agent: {agent_config['name']}") + print("\n=== Processing Agent Config ===") + print(f"Agent Name: {agent_config['name']}") + print(f"Agent Config: {json.dumps(agent_config, indent=2)}") # If hasRagSources, append the RAG tool to the agent's tools if agent_config.get("hasRagSources", False): @@ -205,42 +256,49 @@ def get_agents(agent_configs, tool_configs, complete_request): # 
Prepare tool lists for this agent external_tools = [] + print(f"\n=== Setting Up Tools ===") print(f"Agent {agent_config['name']} has {len(agent_config['tools'])} configured tools") new_tools = [] for tool_name in agent_config["tools"]: - tool_config = get_tool_config_by_name(tool_configs, tool_name) if tool_config: + print(f"\n=== Processing Tool: {tool_name} ===") + print(f"Tool Config: {json.dumps(tool_config, indent=2)}") + external_tools.append({ "type": "function", "function": tool_config }) if tool_name == "web_search": tool = WebSearchTool() - elif tool_name == "rag_search": tool = get_rag_tool(agent_config, complete_request) - else: + print(f"Creating custom tool: {tool_name}") tool = FunctionTool( name=tool_name, description=tool_config["description"], params_json_schema=tool_config["parameters"], strict_json_schema=False, - on_invoke_tool=lambda ctx, args, _tool_name=tool_name, _tool_config=tool_config, _complete_request=complete_request: - catch_all(ctx, args, _tool_name, _tool_config, _complete_request) + on_invoke_tool=lambda ctx, args, _tool_name=tool_name, _tool_config=tool_config, _complete_request=complete_request: + catch_all(ctx, args, _tool_name, _tool_config, _complete_request) ) if tool: new_tools.append(tool) - print(f"Added tool {tool_name} to agent {agent_config['name']}") + print(f"āœ“ Added tool {tool_name} to agent {agent_config['name']}") + if tool_config.get("isMcp", False): + print(f"Tool {tool_name} is an MCP tool") + print(f"MCP Server Name: {tool_config.get('mcpServerName', '')}") + print(f"MCP Server URL: {next((server.get('url', '') for server in complete_request.get('mcpServers', []) if server.get('name') == tool_config.get('mcpServerName', '')), '')}") else: - print(f"WARNING: Tool {tool_name} not found in tool_configs") + print(f"āŒ WARNING: Tool {tool_name} not found in tool_configs") # Create the agent object - print(f"Creating Agent object for {agent_config['name']}") + print(f"\n=== Creating Agent Object ===") + 
print(f"Agent Name: {agent_config['name']}") # add the name and description to the agent instructions agent_instructions = f"## Your Name\n{agent_config['name']}\n\n## Description\n{agent_config['description']}\n\n## Instructions\n{agent_config['instructions']}" @@ -248,7 +306,37 @@ def get_agents(agent_configs, tool_configs, complete_request): # Identify the model model_name = agent_config["model"] if agent_config["model"] else PROVIDER_DEFAULT_MODEL print(f"Using model: {model_name}") - model=OpenAIChatCompletionsModel(model=model_name, openai_client=client) if client else agent_config["model"] + + # Initialize model based on type + if model_name == PROVIDER_DEFAULT_MODEL: + model = OpenAIChatCompletionsModel(model=model_name, openai_client=client) if client else agent_config["model"] + else: + # For non-GPT-4o models, use the model directly + if isinstance(agent_config["model"], str): + # Use the model name as is since it's being routed through LiteLLM + base_model = OpenAIChatCompletionsModel(model=agent_config["model"], openai_client=client) + + # Wrap the model to handle tool calls + class ToolCallModel(OpenAIChatCompletionsModel): + def __init__(self, base_model, tools): + super().__init__(model=base_model.model, openai_client=base_model._client) + self.base_model = base_model + self.tools = tools + print(f"\n=== ToolCallModel Initialized ===") + print(f"Model: {self.model}") + print(f"Available Tools: {[t.name for t in self.tools]}") + print(f"Tool Details:") + for tool in self.tools: + print(f"- {tool.name}: {tool.__dict__}") + + # Create the ToolCallModel instance + model = ToolCallModel(base_model, new_tools) + print(f"\n=== Created ToolCallModel for {agent_config['name']} ===") + print(f"Model: {model.model}") + print(f"Tools: {[t.name for t in model.tools]}") + else: + # If it's already a model object, use it directly + model = agent_config["model"] # Create the agent object new_agent = NewAgent( @@ -256,8 +344,8 @@ def get_agents(agent_configs, 
tool_configs, complete_request): instructions=agent_instructions, handoff_description=agent_config["description"], tools=new_tools, - model = model, - model_settings=ModelSettings(temperature=0.0) + model = model, # Pass the model directly + model_settings=ModelSettings(temperature=0.0) # Use only temperature setting ) # Set the max calls per parent agent @@ -278,9 +366,158 @@ def get_agents(agent_configs, tool_configs, complete_request): new_agent_to_children[agent_config["name"]] = agent_config.get("connectedAgents", []) new_agent_name_to_index[agent_config["name"]] = len(new_agents) new_agents.append(new_agent) - print(f"Successfully created agent: {agent_config['name']}") + print(f"āœ“ Successfully created agent: {agent_config['name']}") + + # Set up tool execution + if hasattr(new_agent, 'on_tool_call'): + original_on_tool_call = new_agent.on_tool_call + + async def on_tool_call_wrapper(tool_call): + try: + # Extract tool name and arguments + tool_name = tool_call.function.name + arguments = json.loads(tool_call.function.arguments) + + print(f"\n=== Tool Call Details ===") + print(f"Tool Name: {tool_name}") + print(f"Arguments: {json.dumps(arguments, indent=2)}") + + # Find the tool in new_tools + tool = next((t for t in new_tools if t.name == tool_name), None) + if tool: + print(f"āœ“ Found tool: {tool.name}") + print(f"Tool config: {tool.__dict__}") + + # Execute the tool + print(f"Executing tool: {tool_name}") + try: + # Strip Python code block markers if present + args = json.dumps(arguments) + if args.startswith('<|python_start|>'): + args = args[len('<|python_start|>'):] + if args.endswith('<|python_end|>'): + args = args[:-len('<|python_end|>')] + + result = await tool.on_invoke_tool(None, args) + print(f"āœ“ Tool execution result: {result}") + return result + except Exception as e: + print(f"āŒ Error executing tool {tool_name}: {str(e)}") + print(f"Stack trace:", exc_info=True) + return f"Error executing tool {tool_name}: {str(e)}" + else: + 
print(f"āŒ ERROR: Tool {tool_name} not found in available tools: {[t.name for t in new_tools]}") + return f"Tool {tool_name} not found" + except Exception as e: + print(f"āŒ ERROR in tool execution: {str(e)}") + print(f"Stack trace:", exc_info=True) + return f"Error: {str(e)}" + + new_agent.on_tool_call = on_tool_call_wrapper + print(f"āœ“ Set up tool call handler for agent {new_agent.name}") + + # Set up tool call handling + if hasattr(new_agent, 'handle_tool_calls'): + original_handle_tool_calls = new_agent.handle_tool_calls + + async def handle_tool_calls_wrapper(tool_calls): + try: + if not tool_calls: + print("No tool calls to handle") + return None + + print(f"\n=== Handling Tool Calls ===") + print(f"Number of tool calls: {len(tool_calls)}") + + results = [] + for tool_call in tool_calls: + if hasattr(tool_call, 'function'): + print(f"\nProcessing tool call: {tool_call.function.name}") + result = await new_agent.on_tool_call(tool_call) + print(f"āœ“ Tool call result: {result}") + results.append(result) + else: + print(f"āŒ Tool call missing function attribute: {tool_call}") + + return results + except Exception as e: + print(f"āŒ ERROR in handle_tool_calls: {str(e)}") + print(f"Stack trace:", exc_info=True) + return None + + new_agent.handle_tool_calls = handle_tool_calls_wrapper + print(f"āœ“ Set up tool calls handler for agent {new_agent.name}") + + # Set up response handling + if hasattr(new_agent, 'on_response'): + original_on_response = new_agent.on_response + + async def on_response_wrapper(response): + try: + # Check if response contains tool calls + if isinstance(response, str) and '"tool_calls"' in response: + try: + print(f"\n=== Processing Response with Tool Calls ===") + print(f"Raw response: {response}") + + # Extract the tool calls section + tool_calls_start = response.find('"tool_calls"') + if tool_calls_start >= 0: + # Find the start of the array + array_start = response.find('[', tool_calls_start) + if array_start >= 0: + # Count 
brackets to find the end of the array + bracket_count = 1 + array_end = array_start + 1 + while bracket_count > 0 and array_end < len(response): + if response[array_end] == '[': + bracket_count += 1 + elif response[array_end] == ']': + bracket_count -= 1 + array_end += 1 + + if bracket_count == 0: + # Extract the tool calls array + tool_calls_str = response[array_start:array_end] + try: + tool_calls = json.loads(tool_calls_str) + print(f"Extracted tool calls: {json.dumps(tool_calls, indent=2)}") + + # Convert to tool call objects + tool_call_objects = [] + for tc in tool_calls: + if 'function' in tc: + tool_call = type('ToolCall', (), { + 'function': type('Function', (), { + 'name': tc['function']['name'], + 'arguments': json.dumps(tc['function']['arguments']) + }) + }) + tool_call_objects.append(tool_call) + + # Handle the tool calls + if tool_call_objects: + print(f"Executing {len(tool_call_objects)} tool calls") + results = await new_agent.handle_tool_calls(tool_call_objects) + print(f"āœ“ Tool call results: {results}") + return results + except json.JSONDecodeError as e: + print(f"āŒ Failed to parse tool calls: {e}") + print(f"Tool calls string: {tool_calls_str}") + except Exception as e: + print(f"āŒ Error extracting tool calls: {e}") + + # If no tool calls or parsing failed, use original handler + return await original_on_response(response) + except Exception as e: + print(f"āŒ ERROR in response handling: {str(e)}") + print(f"Stack trace:", exc_info=True) + return None + + new_agent.on_response = on_response_wrapper + print(f"āœ“ Set up response handler for agent {new_agent.name}") except Exception as e: - print(f"ERROR: Failed to create agent {agent_config['name']}: {str(e)}") + print(f"āŒ ERROR: Failed to create agent {agent_config['name']}: {str(e)}") raise for new_agent in new_agents: @@ -290,7 +527,8 @@ def get_agents(agent_configs, tool_configs, complete_request): # Look up the agent's children from the old agent and create a list called handoffs in 
new_agent with pointers to the children in new_agents new_agent.handoffs = [new_agents[new_agent_name_to_index[child]] for child in new_agent_to_children[new_agent.name]] - print("Returning created agents") + print("\n=== Returning Created Agents ===") + print(f"Number of agents: {len(new_agents)}") print("="*100) return new_agents @@ -352,6 +590,99 @@ async def wrapped_stream_events(): with trace(f"Agent turn: {agent.name}") as trace_ctx: try: async for event in original_stream_events(): + # Check if event contains tool calls + if hasattr(event, 'content') and event.content: + # Try to find tool calls in the content + if '"tool_calls"' in event.content: + print(f"\n=== Found Tool Calls in Stream ===") + print(f"Raw content: {event.content}") + + # Extract the tool calls section + tool_calls_start = event.content.find('"tool_calls"') + if tool_calls_start >= 0: + # Find the start of the array + array_start = event.content.find('[', tool_calls_start) + if array_start >= 0: + # Count brackets to find the end of the array + bracket_count = 1 + array_end = array_start + 1 + while bracket_count > 0 and array_end < len(event.content): + if event.content[array_end] == '[': + bracket_count += 1 + elif event.content[array_end] == ']': + bracket_count -= 1 + array_end += 1 + + if bracket_count == 0: + # Extract the tool calls array + tool_calls_str = event.content[array_start:array_end] + try: + tool_calls = json.loads(tool_calls_str) + print(f"Extracted tool calls: {json.dumps(tool_calls, indent=2)}") + + # Convert to tool call objects + tool_call_objects = [] + for tc in tool_calls: + if 'function' in tc: + tool_call = type('ToolCall', (), { + 'function': type('Function', (), { + 'name': tc['function']['name'], + 'arguments': json.dumps(tc['function']['arguments']) + }) + }) + tool_call_objects.append(tool_call) + + # Handle the tool calls + if tool_call_objects: + print(f"Executing {len(tool_call_objects)} tool calls") + results = [] + for tool_call in tool_call_objects: 
+ print(f"\n=== Processing Tool Call ===") + print(f"Tool Name: {tool_call.function.name}") + print(f"Arguments: {tool_call.function.arguments}") + + # Find the tool in the model's tools + tool = next((t for t in self.tools if t.name == tool_call.function.name), None) + if tool: + print(f"Found tool: {tool.name}") + print(f"Tool config: {tool.__dict__}") + + # Execute the tool + try: + # Strip Python code block markers if present + args = tool_call.function.arguments + if args.startswith('<|python_start|>'): + args = args[len('<|python_start|>'):] + if args.endswith('<|python_end|>'): + args = args[:-len('<|python_end|>')] + + result = await tool.on_invoke_tool(None, args) + print(f"Tool execution result: {result}") + + # Create a new event with the tool result + event.content = result + event.tool_calls = [tool_call] + yield event + + results.append(result) + except Exception as e: + print(f"Error executing tool {tool.name}: {str(e)}") + print(f"Stack trace:", exc_info=True) + error_msg = f"Error executing tool {tool.name}: {str(e)}" + event.content = error_msg + yield event + else: + print(f"ERROR: Tool {tool_call.function.name} not found in available tools: {[t.name for t in self.tools]}") + error_msg = f"Tool {tool_call.function.name} not found" + event.content = error_msg + yield event + + print(f"Tool call results: {results}") + # Don't create a new event here, we already yielded the results + return + except json.JSONDecodeError as e: + print(f"Failed to parse tool calls: {e}") + print(f"Tool calls string: {tool_calls_str}") yield event except GeneratorExit: # Handle generator exit gracefully diff --git a/apps/rowboat_agents/src/utils/client.py b/apps/rowboat_agents/src/utils/client.py index 44af3e979..a1bb625a8 100644 --- a/apps/rowboat_agents/src/utils/client.py +++ b/apps/rowboat_agents/src/utils/client.py @@ -20,7 +20,10 @@ if PROVIDER_BASE_URL: print(f"Using provider {PROVIDER_BASE_URL}") - client = AsyncOpenAI(base_url=PROVIDER_BASE_URL, 
api_key=PROVIDER_API_KEY) + client = AsyncOpenAI( + base_url=PROVIDER_BASE_URL, + default_headers={"Authorization": f"Bearer {PROVIDER_API_KEY}"} + ) else: print("No provider base URL configured, using OpenAI directly") @@ -28,8 +31,8 @@ if PROVIDER_BASE_URL: print(f"Using provider {PROVIDER_BASE_URL} for completions") completions_client = OpenAI( - base_url=PROVIDER_BASE_URL, - api_key=PROVIDER_API_KEY + base_url=PROVIDER_BASE_URL, + default_headers={"Authorization": f"Bearer {PROVIDER_API_KEY}"} ) else: print(f"Using OpenAI directly for completions") diff --git a/apps/rowboat_agents/src/utils/common.py b/apps/rowboat_agents/src/utils/common.py index 0fb9791b4..109a9482a 100644 --- a/apps/rowboat_agents/src/utils/common.py +++ b/apps/rowboat_agents/src/utils/common.py @@ -5,7 +5,7 @@ import sys import time from dotenv import load_dotenv -from openai import OpenAI +from .model_provider import model_provider from src.utils.client import completions_client load_dotenv() @@ -33,18 +33,18 @@ def setup_logger(name, log_file='./run.log', level=logging.INFO, log_to_file=Fal return logger -common_logger = setup_logger('logger') -logger = common_logger +# Create a common logger instance +common_logger = setup_logger('common') def read_json_from_file(file_name): - logger.info(f"Reading json from {file_name}") + common_logger.info(f"Reading json from {file_name}") try: with open(file_name, 'r') as file: out = file.read() out = json.loads(out) return out except Exception as e: - logger.error(e) + common_logger.error(e) return None def get_api_key(key_name): @@ -54,54 +54,52 @@ def get_api_key(key_name): raise ValueError(f"{key_name} not found. 
Did you set it in the .env file?") return api_key -def generate_gpt4o_output_from_multi_turn_conv(messages, output_type='json', model="gpt-4o"): - return generate_openai_output(messages, output_type, model) +async def generate_gpt4o_output_from_multi_turn_conv(messages, output_type='json', model="gpt-4o"): + return await generate_openai_output(messages, output_type, model) -def generate_openai_output(messages, output_type='not_json', model="gpt-4o", return_completion=False): - print(f"In generate_openai_output, using client: {completions_client} and model: {model}") +async def generate_openai_output(messages, output_type='not_json', model="gpt-4o", return_completion=False): + print(f"In generate_openai_output, using model: {model}") try: - if output_type == 'json': - chat_completion = completions_client.chat.completions.create( - model=model, - messages=messages, - response_format={"type": "json_object"} - ) - else: - chat_completion = completions_client.chat.completions.create( - model=model, - messages=messages, - ) + response = await model_provider.generate_output( + messages=messages, + model=model, + output_type=output_type + ) if return_completion: - return chat_completion - return chat_completion.choices[0].message.content + return response + return response except Exception as e: - logger.error(e) + common_logger.error(e) return None -def generate_llm_output(messages, model): - model_provider = None - if "gpt" in model: - model_provider = "openai" - else: - raise ValueError(f"Model {model} not supported") - - if model_provider == "openai": - response = generate_openai_output(messages, output_type='text', model=model) +async def generate_llm_output(messages, model): + try: + response = await model_provider.generate_output( + messages=messages, + model=model, + output_type='text' + ) return response - -def generate_gpt4o_output_from_multi_turn_conv_multithreaded(messages, retries=5, delay=1, output_type='json'): + except Exception as e: + 
common_logger.error(f"Error generating LLM output: {str(e)}") + return None + +async def generate_gpt4o_output_from_multi_turn_conv_multithreaded(messages, retries=5, delay=1, output_type='json'): while retries > 0: try: # Call GPT-4o API - output = generate_gpt4o_output_from_multi_turn_conv(messages, output_type='json') + output = await generate_gpt4o_output_from_multi_turn_conv(messages, output_type='json') return output # If the request is successful, break out of the loop - except openai.RateLimitError: - print(f'Rate limit exceeded. Retrying in {delay} seconds...') - time.sleep(delay) - delay *= 2 # Exponential backoff - retries -= 1 + except Exception as e: + if "rate_limit" in str(e).lower(): + print(f'Rate limit exceeded. Retrying in {delay} seconds...') + time.sleep(delay) + delay *= 2 # Exponential backoff + retries -= 1 + else: + raise if retries == 0: print(f'Failed to process due to rate limit.') @@ -131,14 +129,14 @@ def merge_defaultdicts(dict_parent, dict_child): return dict_parent def read_jsonl_from_file(file_name): - # logger.info(f"Reading jsonl from {file_name}") + # common_logger.info(f"Reading jsonl from {file_name}") try: with open(file_name, 'r') as file: lines = file.readlines() dataset = [json.loads(line.strip()) for line in lines] return dataset except Exception as e: - logger.error(e) + common_logger.error(e) return None def write_jsonl_to_file(list_dicts, file_name): @@ -148,7 +146,7 @@ def write_jsonl_to_file(list_dicts, file_name): file.write(json.dumps(d)+'\n') return True except Exception as e: - logger.error(e) + common_logger.error(e) return False def read_text_from_file(file_name): @@ -157,7 +155,7 @@ def read_text_from_file(file_name): out = file.read() return out except Exception as e: - logger.error(e) + common_logger.error(e) return None def write_json_to_file(data, file_name): @@ -166,7 +164,7 @@ def write_json_to_file(data, file_name): json.dump(data, file, indent=4) return True except Exception as e: - logger.error(e) + 
common_logger.error(e) return False diff --git a/apps/rowboat_agents/src/utils/model_provider.py b/apps/rowboat_agents/src/utils/model_provider.py new file mode 100644 index 000000000..6cc694804 --- /dev/null +++ b/apps/rowboat_agents/src/utils/model_provider.py @@ -0,0 +1,174 @@ +import os +import logging +from typing import List, Dict, Any, Optional, Union +from openai import OpenAI, AsyncOpenAI +import litellm +from litellm import completion, acompletion + +# Configure logging +logger = logging.getLogger(__name__) + +class ModelProvider: + def __init__(self): + self.provider_base_url = os.getenv('PROVIDER_BASE_URL', '') + self.provider_api_key = os.getenv('PROVIDER_API_KEY') or os.getenv('OPENAI_API_KEY') + self.default_model = os.getenv('PROVIDER_DEFAULT_MODEL', 'gpt-4.1') + + if not self.provider_api_key: + raise ValueError("No LLM Provider API key found") + + # Initialize clients + self._init_clients() + + def _init_clients(self): + """Initialize OpenAI and LiteLLM clients based on configuration.""" + if self.provider_base_url: + logger.info(f"Using provider {self.provider_base_url}") + self.openai_client = AsyncOpenAI( + base_url=self.provider_base_url, + api_key=self.provider_api_key + ) + self.completions_client = OpenAI( + base_url=self.provider_base_url, + api_key=self.provider_api_key + ) + else: + logger.info("Using OpenAI directly") + self.openai_client = AsyncOpenAI(api_key=self.provider_api_key) + self.completions_client = OpenAI(api_key=self.provider_api_key) + + # Configure LiteLLM + litellm.api_key = self.provider_api_key + if self.provider_base_url: + litellm.api_base = self.provider_base_url + + def _is_litellm_model(self, model_name: str) -> bool: + """Check if the model should be handled by LiteLLM.""" + # Add your logic here to determine if a model should use LiteLLM + # For example, check if it's a non-OpenAI model + return not model_name.startswith('gpt-') + + async def generate_output( + self, + messages: List[Dict[str, str]], + model: 
Optional[str] = None, + output_type: str = 'text', + temperature: float = 0.0, + max_tokens: Optional[int] = None, + stream: bool = False + ) -> Union[str, Dict[str, Any]]: + """ + Generate output using either OpenAI or LiteLLM based on the model. + + Args: + messages: List of message dictionaries + model: Model name to use (defaults to provider default) + output_type: Type of output ('text' or 'json') + temperature: Sampling temperature + max_tokens: Maximum tokens to generate + stream: Whether to stream the response + + Returns: + Generated output as string or dictionary + """ + model = model or self.default_model + + try: + if self._is_litellm_model(model): + return await self._generate_litellm_output( + messages=messages, + model=model, + output_type=output_type, + temperature=temperature, + max_tokens=max_tokens, + stream=stream + ) + else: + return await self._generate_openai_output( + messages=messages, + model=model, + output_type=output_type, + temperature=temperature, + max_tokens=max_tokens, + stream=stream + ) + except Exception as e: + logger.error(f"Error generating output: {str(e)}") + raise + + async def _generate_openai_output( + self, + messages: List[Dict[str, str]], + model: str, + output_type: str = 'text', + temperature: float = 0.0, + max_tokens: Optional[int] = None, + stream: bool = False + ) -> Union[str, Dict[str, Any]]: + """Generate output using OpenAI API.""" + try: + kwargs = { + "model": model, + "messages": messages, + "temperature": temperature, + } + + if max_tokens: + kwargs["max_tokens"] = max_tokens + + if output_type == 'json': + kwargs["response_format"] = {"type": "json_object"} + + if stream: + kwargs["stream"] = True + + completion = await self.openai_client.chat.completions.create(**kwargs) + + if stream: + return completion + else: + return completion.choices[0].message.content + + except Exception as e: + logger.error(f"OpenAI API error: {str(e)}") + raise + + async def _generate_litellm_output( + self, + messages: 
List[Dict[str, str]], + model: str, + output_type: str = 'text', + temperature: float = 0.0, + max_tokens: Optional[int] = None, + stream: bool = False + ) -> Union[str, Dict[str, Any]]: + """Generate output using LiteLLM.""" + try: + kwargs = { + "model": model, + "messages": messages, + "temperature": temperature, + } + + if max_tokens: + kwargs["max_tokens"] = max_tokens + + if output_type == 'json': + kwargs["response_format"] = {"type": "json_object"} + + if stream: + kwargs["stream"] = True + + completion = await acompletion(**kwargs) + + if stream: + return completion + else: + return completion.choices[0].message.content + + except Exception as e: + logger.error(f"LiteLLM API error: {str(e)}") + raise + +# Create a singleton instance +model_provider = ModelProvider() \ No newline at end of file diff --git a/custom_handler.py b/custom_handler.py new file mode 100644 index 000000000..8faba82f2 --- /dev/null +++ b/custom_handler.py @@ -0,0 +1,990 @@ +import litellm +import requests +import os +from typing import Optional, Union, Dict, Any, AsyncGenerator +import logging +import sys +import json +import asyncio +import aiohttp +import uuid +import re + +# Set up logging +logging.basicConfig(level=logging.WARNING) +logger = logging.getLogger(__name__) + +logger.debug("Loading custom_handler.py") +logger.debug(f"Python path: {sys.path}") + +class ModalLLM: + def __init__(self, api_base: Optional[str] = None): + logger.debug("Initializing ModalLLM") + self.api_base = api_base or "https://stevef1uk--ollama-api-api.modal.run" + logger.debug(f"Using API base: {self.api_base}") + # Log environment variables at initialization + logger.debug(f"Environment variables at init - TOKEN_ID: {os.getenv('TOKEN_ID')}, TOKEN_SECRET: {os.getenv('TOKEN_SECRET')}") + + def completion( + self, + model: str, + messages: list, + model_response: Optional[Dict] = None, + optional_params: Optional[Dict[str, Any]] = None, + **kwargs + ) -> Dict: + logger.debug("Starting completion 
request") + logger.debug(f"Model: {model}") + logger.debug(f"Messages: {messages}") + logger.debug(f"kwargs: {kwargs}") + logger.debug(f"optional_params: {optional_params}") + + # Get credentials from kwargs or environment variables + token = kwargs.get("api_key") or os.getenv("TOKEN_ID") + secret = kwargs.get("token_secret") or os.getenv("TOKEN_SECRET") + + logger.debug(f"Using token: {token[:8]}..." if token else "No token found") + logger.debug(f"Using secret: {secret[:8]}..." if secret else "No secret found") + + # Check if this is a proxy request + if "authorization" in kwargs: + logger.debug("Detected proxy request") + logger.debug(f"Authorization header: {kwargs['authorization']}") + # Extract credentials from proxy authorization + auth_header = kwargs["authorization"] + if auth_header.startswith("Bearer "): + auth_token = auth_header[7:] # Remove "Bearer " prefix + logger.debug(f"Auth token after removing Bearer: {auth_token}") + # If this is a LiteLLM virtual key, use environment variables + if auth_token.startswith("sk-"): + logger.debug("Detected LiteLLM virtual key") + # For proxy requests with virtual keys, use environment variables + token = os.getenv("TOKEN_ID") + secret = os.getenv("TOKEN_SECRET") + if not token or not secret: + error_msg = "Modal credentials not found in environment variables" + logger.error(error_msg) + raise ValueError(error_msg) + logger.debug(f"Using Modal credentials from environment - Token: {token[:8]}..., Secret: {secret[:8]}...") + else: + # Try to split the token into key and secret + if ":" in auth_token: + token, secret = auth_token.split(":", 1) + logger.debug("Extracted credentials from proxy authorization with key:secret format") + logger.debug(f"Token: {token[:8]}..., Secret: {secret[:8]}...") + else: + # If no colon, use the token as is + token = auth_token + logger.debug("Using token directly from proxy authorization") + logger.debug(f"Token: {token[:8]}...") + else: + logger.error("Invalid proxy authorization 
format") + raise ValueError("Invalid proxy authorization format") + else: + logger.debug("No authorization header found in kwargs") + logger.debug("Available kwargs keys: " + str(list(kwargs.keys()))) + + if not token: + error_msg = "Missing token. Please provide a valid token." + logger.error(error_msg) + raise ValueError(error_msg) + + headers = { + "Content-Type": "application/json", + "Modal-Key": token + } + + # Only add Modal-Secret if we have it + if secret: + headers["Modal-Secret"] = secret + + logger.debug(f"Final headers: {headers}") + + # Extract model name from the input model string + # Handle format like "modal/mistral:latest" -> "mistral:latest" + if "/" in model: + model_name = model.split("/", 1)[1] # Get everything after the first / + else: + model_name = model + logger.debug(f"Extracted model name: {model_name}") + + # Combine all messages into a single prompt + prompt = "" + for msg in messages: + if msg["role"] == "system": + prompt += msg["content"] + "\n\n" + elif msg["role"] == "user": + prompt += "User: " + msg["content"] + "\n" + elif msg["role"] == "assistant": + prompt += "Assistant: " + msg["content"] + "\n" + prompt = prompt.strip() + logger.debug(f"Combined prompt: {prompt}") + + # Prepare the payload + payload = { + "prompt": prompt, + "temperature": 0.7, + "model": model_name + } + + # Add tools if provided + if "tools" in kwargs: + tools = kwargs["tools"] + logger.debug(f"Tools provided: {tools}") + # Include tools in the prompt instead of as a separate parameter + tool_descriptions = [] + for tool in tools: + if "function" in tool: + tool_desc = f"- {tool['function']['name']}: {tool['function']['description']}" + if "parameters" in tool["function"]: + params = tool["function"]["parameters"] + if "properties" in params: + param_desc = [] + for param_name, param_info in params["properties"].items(): + param_desc.append(f"{param_name} ({param_info.get('type', 'any')})") + tool_desc += f" Parameters: {', '.join(param_desc)}" + 
tool_descriptions.append(tool_desc) + + if tool_descriptions: + prompt += "\n\nAvailable tools:\n" + "\n".join(tool_descriptions) + prompt += "\n\nYou are a helpful assistant that can use tools. When you need to use a tool, you must respond with a JSON object in this exact format:\n" + prompt += """{ + "role": "assistant", + "content": null, + "tool_calls": [{ + "function": { + "name": "TOOL_NAME", + "arguments": "{\\"param1\\": value1}" + }, + "id": "call_123", + "type": "function" + }], + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" +}""" + + # Add the user's query + for msg in messages: + if msg["role"] == "user": + prompt += f"\n\nUser: {msg['content']}" + break + + # Add explicit instruction to use tools + prompt += "\n\nIMPORTANT: You must use the available tools to help the user. Do not respond with regular text unless you have used all relevant tools first. When using a tool, make sure to use the exact format shown above." + payload["prompt"] = prompt + logger.debug(f"Updated prompt with tools: {prompt}") + + logger.debug(f"Final payload with model: {payload}") + + try: + logger.debug(f"Making request to {self.api_base}") + logger.debug(f"Headers: {headers}") + logger.debug(f"Payload: {payload}") + + # Log the exact curl command that would be used + curl_command = f"""curl -X POST "{self.api_base}" \\ + -H "Content-Type: application/json" \\ + -H "Modal-Key: {token}" \\ + -H "Modal-Secret: {secret}" \\ + -d '{json.dumps(payload)}'""" + logger.debug(f"Equivalent curl command:\n{curl_command}") + + # Use requests.Session for better connection handling + with requests.Session() as session: + try: + logger.debug("Creating session and preparing request") + response = session.post( + self.api_base, + json=payload, + headers=headers, + verify=True, + timeout=60, # Increased timeout + stream=True # Enable streaming + ) + logger.debug("Request completed") + except requests.exceptions.SSLError as e: + logger.error(f"SSL Error: {str(e)}") + 
raise + except requests.exceptions.ConnectionError as e: + logger.error(f"Connection Error: {str(e)}") + raise + except requests.exceptions.Timeout as e: + logger.error(f"Timeout Error: {str(e)}") + raise + except requests.exceptions.RequestException as e: + logger.error(f"Request Exception: {str(e)}") + raise + + logger.debug(f"Response status: {response.status_code}") + logger.debug(f"Response headers: {dict(response.headers)}") + + if response.status_code == 401: + logger.error("Authentication failed. Please check your Modal credentials") + raise Exception("Authentication failed. Please check your Modal credentials") + + response.raise_for_status() + + # Handle streaming response + full_response = "" + for chunk in response.iter_content(chunk_size=None, decode_unicode=True): + if chunk: + logger.debug(f"Received chunk: {chunk}") + full_response += chunk + + logger.debug(f"Full response: {full_response}") + + try: + # Try to parse the response as JSON + response_data = json.loads(full_response) + logger.debug(f"Parsed JSON response: {response_data}") + + # Transform the response to the correct format + transformed_data = transform_to_correct_format(full_response) + if transformed_data: + response_data = transformed_data + + # Extract the response text + if isinstance(response_data, dict): + response_text = response_data.get("response", "") + # Check for tool calls in the response + if "tool_calls" in response_data: + return { + "id": "modal-response", + "choices": [{ + "message": { + "role": "assistant", + "content": response_text, + "tool_calls": response_data["tool_calls"] + }, + "finish_reason": "tool_calls" + }], + "created": 0, + "model": model, + "usage": {} + } + else: + response_text = str(response_data) + except json.JSONDecodeError: + # If not JSON, treat as raw text + logger.debug("Response is not JSON, treating as raw text") + if "assistant" in full_response.lower(): + response_text = full_response.split("assistant", 1)[1].strip() + else: + 
response_text = full_response.strip() + + # Remove any remaining user prompt + if "user" in response_text.lower(): + response_text = response_text.split("user", 1)[0].strip() + + # Try to transform text response into tool call + transformed_data = transform_to_correct_format(response_text) + if transformed_data and "tool_calls" in transformed_data: + return { + "id": "modal-response", + "choices": [{ + "message": { + "role": "assistant", + "content": "", + "tool_calls": transformed_data["tool_calls"] + }, + "finish_reason": "tool_calls" + }], + "created": 0, + "model": model, + "usage": {} + } + + if not response_text: + logger.error(f"Unexpected response format: {full_response}") + raise Exception("Unexpected response format from Modal API") + + return { + "id": "modal-response", + "choices": [{ + "message": { + "role": "assistant", + "content": response_text + }, + "finish_reason": "stop" + }], + "created": 0, + "model": model, + "usage": {} + } + + except requests.exceptions.RequestException as e: + logger.error(f"Request failed: {str(e)}") + if hasattr(e, 'response') and e.response is not None: + logger.error(f"Response text: {e.response.text}") + raise Exception(f"Error connecting to Modal API: {str(e)}") + + async def acompletion( + self, + model: str, + messages: list, + model_response: Optional[Dict] = None, + optional_params: Optional[Dict[str, Any]] = None, + **kwargs + ) -> Dict: + logger.debug("Starting async completion request") + logger.debug(f"Model: {model}") + logger.debug(f"Messages: {messages}") + logger.debug(f"kwargs: {kwargs}") + logger.debug(f"optional_params: {optional_params}") + + # Get credentials from kwargs or environment variables + token = kwargs.get("api_key") or os.getenv("TOKEN_ID") + secret = kwargs.get("token_secret") or os.getenv("TOKEN_SECRET") + + logger.debug(f"Using token: {token[:8]}..." if token else "No token found") + logger.debug(f"Using secret: {secret[:8]}..." 
if secret else "No secret found") + + # Check if this is a proxy request + if "authorization" in kwargs: + logger.debug("Detected proxy request") + logger.debug(f"Authorization header: {kwargs['authorization']}") + # Extract credentials from proxy authorization + auth_header = kwargs["authorization"] + if auth_header.startswith("Bearer "): + auth_token = auth_header[7:] # Remove "Bearer " prefix + logger.debug(f"Auth token after removing Bearer: {auth_token}") + # If this is a LiteLLM virtual key, use environment variables + if auth_token.startswith("sk-"): + logger.debug("Detected LiteLLM virtual key") + # For proxy requests with virtual keys, use environment variables + token = os.getenv("TOKEN_ID") + secret = os.getenv("TOKEN_SECRET") + if not token or not secret: + error_msg = "Modal credentials not found in environment variables" + logger.error(error_msg) + raise ValueError(error_msg) + logger.debug(f"Using Modal credentials from environment - Token: {token[:8]}..., Secret: {secret[:8]}...") + else: + # Try to split the token into key and secret + if ":" in auth_token: + token, secret = auth_token.split(":", 1) + logger.debug("Extracted credentials from proxy authorization with key:secret format") + logger.debug(f"Token: {token[:8]}..., Secret: {secret[:8]}...") + else: + # If no colon, use the token as is + token = auth_token + logger.debug("Using token directly from proxy authorization") + logger.debug(f"Token: {token[:8]}...") + else: + logger.error("Invalid proxy authorization format") + raise ValueError("Invalid proxy authorization format") + else: + logger.debug("No authorization header found in kwargs") + logger.debug("Available kwargs keys: " + str(list(kwargs.keys()))) + + if not token: + error_msg = "Missing token. Please provide a valid token." 
+ logger.error(error_msg) + raise ValueError(error_msg) + + headers = { + "Content-Type": "application/json", + "Modal-Key": token + } + + # Only add Modal-Secret if we have it + if secret: + headers["Modal-Secret"] = secret + + logger.debug(f"Final headers: {headers}") + + # Extract model name from the input model string + # Handle format like "modal/mistral:latest" -> "mistral:latest" + if "/" in model: + model_name = model.split("/", 1)[1] # Get everything after the first / + else: + model_name = model + logger.debug(f"Extracted model name: {model_name}") + + # Combine all messages into a single prompt + prompt = "" + for msg in messages: + if msg["role"] == "system": + prompt += msg["content"] + "\n\n" + elif msg["role"] == "user": + prompt += "User: " + msg["content"] + "\n" + elif msg["role"] == "assistant": + prompt += "Assistant: " + msg["content"] + "\n" + prompt = prompt.strip() + logger.debug(f"Combined prompt: {prompt}") + + # Prepare the payload + payload = { + "prompt": prompt, + "temperature": 0.7, + "model": model_name + } + + # Add tools if provided + if "tools" in kwargs: + tools = kwargs["tools"] + logger.debug(f"Tools provided: {tools}") + # Include tools in the prompt instead of as a separate parameter + tool_descriptions = [] + for tool in tools: + if "function" in tool: + tool_desc = f"- {tool['function']['name']}: {tool['function']['description']}" + if "parameters" in tool["function"]: + params = tool["function"]["parameters"] + if "properties" in params: + param_desc = [] + for param_name, param_info in params["properties"].items(): + param_desc.append(f"{param_name} ({param_info.get('type', 'any')})") + tool_desc += f" Parameters: {', '.join(param_desc)}" + tool_descriptions.append(tool_desc) + + if tool_descriptions: + prompt += "\n\nAvailable tools:\n" + "\n".join(tool_descriptions) + prompt += "\n\nYou are a helpful assistant that can use tools. 
When you need to use a tool, you must respond with a JSON object in this exact format:\n" + prompt += """{ + "role": "assistant", + "content": null, + "tool_calls": [{ + "function": { + "name": "TOOL_NAME", + "arguments": "{\\"param1\\": value1}" + }, + "id": "call_123", + "type": "function" + }], + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" +}""" + + # Add the user's query + for msg in messages: + if msg["role"] == "user": + prompt += f"\n\nUser: {msg['content']}" + break + + # Add explicit instruction to use tools + prompt += "\n\nIMPORTANT: You must use the available tools to help the user. Do not respond with regular text unless you have used all relevant tools first. When using a tool, make sure to use the exact format shown above." + payload["prompt"] = prompt + logger.debug(f"Updated prompt with tools: {prompt}") + + logger.debug(f"Final payload with model: {payload}") + + try: + logger.debug(f"Making async request to {self.api_base}") + logger.debug(f"Headers: {headers}") + logger.debug(f"Payload: {payload}") + + # Use aiohttp for async requests + async with aiohttp.ClientSession() as session: + try: + logger.debug("Creating session and preparing request") + async with session.post( + self.api_base, + json=payload, + headers=headers, + ssl=True, + timeout=60, # Increased timeout + ) as response: + logger.debug("Request completed") + logger.debug(f"Response status: {response.status}") + logger.debug(f"Response headers: {dict(response.headers)}") + + if response.status == 401: + logger.error("Authentication failed. Please check your Modal credentials") + raise Exception("Authentication failed. 
Please check your Modal credentials") + + response.raise_for_status() + + # Read the response + full_response = await response.text() + logger.debug(f"Full response: {full_response}") + + try: + # Try to parse the response as JSON + response_data = json.loads(full_response) + logger.debug(f"Parsed JSON response: {response_data}") + + # Transform the response to the correct format + transformed_data = transform_to_correct_format(full_response) + if transformed_data: + response_data = transformed_data + + # Extract the response text + if isinstance(response_data, dict): + response_text = response_data.get("response", "") + # Check for tool calls in the response + if "tool_calls" in response_data: + return { + "id": "modal-response", + "choices": [{ + "message": { + "role": "assistant", + "content": response_text, + "tool_calls": response_data["tool_calls"] + }, + "finish_reason": "tool_calls" + }], + "created": 0, + "model": model, + "usage": {} + } + else: + response_text = str(response_data) + except json.JSONDecodeError: + # If not JSON, treat as raw text + logger.debug("Response is not JSON, treating as raw text") + if "assistant" in full_response.lower(): + response_text = full_response.split("assistant", 1)[1].strip() + else: + response_text = full_response.strip() + + # Remove any remaining user prompt + if "user" in response_text.lower(): + response_text = response_text.split("user", 1)[0].strip() + + if not response_text: + logger.error(f"Unexpected response format: {full_response}") + raise Exception("Unexpected response format from Modal API") + + return { + "id": "modal-response", + "choices": [{ + "message": { + "role": "assistant", + "content": response_text + }, + "finish_reason": "stop" + }], + "created": 0, + "model": model, + "usage": {} + } + + except aiohttp.ClientError as e: + logger.error(f"Request failed: {str(e)}") + raise Exception(f"Error connecting to Modal API: {str(e)}") + + except Exception as e: + logger.error(f"Unexpected error: 
{str(e)}") + raise + + async def astreaming( + self, + model: str, + messages: list, + model_response: Optional[Dict] = None, + optional_params: Optional[Dict[str, Any]] = None, + **kwargs + ) -> AsyncGenerator[Dict, None]: + logger.debug("Starting async streaming request") + logger.debug(f"Model: {model}") + logger.debug(f"Messages: {messages}") + logger.debug(f"kwargs: {kwargs}") + logger.debug(f"optional_params: {optional_params}") + + # Get credentials from kwargs or environment variables + token = kwargs.get("api_key") or os.getenv("TOKEN_ID") + secret = kwargs.get("token_secret") or os.getenv("TOKEN_SECRET") + + logger.debug(f"Using token: {token[:8]}..." if token else "No token found") + logger.debug(f"Using secret: {secret[:8]}..." if secret else "No secret found") + + # Check if this is a proxy request + if "authorization" in kwargs: + logger.debug("Detected proxy request") + logger.debug(f"Authorization header: {kwargs['authorization']}") + # Extract credentials from proxy authorization + auth_header = kwargs["authorization"] + if auth_header.startswith("Bearer "): + auth_token = auth_header[7:] # Remove "Bearer " prefix + logger.debug(f"Auth token after removing Bearer: {auth_token}") + # If this is a LiteLLM virtual key, use environment variables + if auth_token.startswith("sk-"): + logger.debug("Detected LiteLLM virtual key") + # For proxy requests with virtual keys, use environment variables + token = os.getenv("TOKEN_ID") + secret = os.getenv("TOKEN_SECRET") + if not token or not secret: + error_msg = "Modal credentials not found in environment variables" + logger.error(error_msg) + raise ValueError(error_msg) + logger.debug(f"Using Modal credentials from environment - Token: {token[:8]}..., Secret: {secret[:8]}...") + else: + # Try to split the token into key and secret + if ":" in auth_token: + token, secret = auth_token.split(":", 1) + logger.debug("Extracted credentials from proxy authorization with key:secret format") + logger.debug(f"Token: 
{token[:8]}..., Secret: {secret[:8]}...") + else: + # If no colon, use the token as is + token = auth_token + logger.debug("Using token directly from proxy authorization") + logger.debug(f"Token: {token[:8]}...") + else: + logger.error("Invalid proxy authorization format") + raise ValueError("Invalid proxy authorization format") + else: + logger.debug("No authorization header found in kwargs") + logger.debug("Available kwargs keys: " + str(list(kwargs.keys()))) + + if not token: + error_msg = "Missing token. Please provide a valid token." + logger.error(error_msg) + raise ValueError(error_msg) + + headers = { + "Content-Type": "application/json", + "Modal-Key": token + } + + # Only add Modal-Secret if we have it + if secret: + headers["Modal-Secret"] = secret + + logger.debug(f"Final headers: {headers}") + + # Extract model name from the input model string + # Handle format like "modal/mistral:latest" -> "mistral:latest" + if "/" in model: + model_name = model.split("/", 1)[1] # Get everything after the first / + else: + model_name = model + logger.debug(f"Extracted model name: {model_name}") + + # Combine all messages into a single prompt + prompt = "" + for msg in messages: + if msg["role"] == "system": + prompt += msg["content"] + "\n\n" + elif msg["role"] == "user": + prompt += "User: " + msg["content"] + "\n" + elif msg["role"] == "assistant": + prompt += "Assistant: " + msg["content"] + "\n" + prompt = prompt.strip() + logger.debug(f"Combined prompt: {prompt}") + + # Prepare the payload + payload = { + "prompt": prompt, + "temperature": 0.7, + "model": model_name + } + + # Add tools if provided + if "tools" in kwargs: + tools = kwargs["tools"] + logger.debug(f"Tools provided: {tools}") + # Include tools in the prompt instead of as a separate parameter + tool_descriptions = [] + for tool in tools: + if "function" in tool: + tool_desc = f"- {tool['function']['name']}: {tool['function']['description']}" + if "parameters" in tool["function"]: + params = 
tool["function"]["parameters"] + if "properties" in params: + param_desc = [] + for param_name, param_info in params["properties"].items(): + param_desc.append(f"{param_name} ({param_info.get('type', 'any')})") + tool_desc += f" Parameters: {', '.join(param_desc)}" + tool_descriptions.append(tool_desc) + + if tool_descriptions: + prompt += "\n\nAvailable tools:\n" + "\n".join(tool_descriptions) + prompt += "\n\nYou are a helpful assistant that can use tools. When you need to use a tool, you must respond with a JSON object in this exact format:\n" + prompt += """{ + "role": "assistant", + "content": null, + "tool_calls": [{ + "function": { + "name": "TOOL_NAME", + "arguments": "{\\"param1\\": value1}" + }, + "id": "call_123", + "type": "function" + }], + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" +}""" + + # Add the user's query + for msg in messages: + if msg["role"] == "user": + prompt += f"\n\nUser: {msg['content']}" + break + + # Add explicit instruction to use tools + prompt += "\n\nIMPORTANT: You must use the available tools to help the user. Do not respond with regular text unless you have used all relevant tools first. When using a tool, make sure to use the exact format shown above." 
+ payload["prompt"] = prompt + logger.debug(f"Updated prompt with tools: {prompt}") + + logger.debug(f"Final payload with model: {payload}") + + try: + logger.debug(f"Making async streaming request to {self.api_base}") + logger.debug(f"Headers: {headers}") + logger.debug(f"Payload: {payload}") + + # Use aiohttp for async requests + async with aiohttp.ClientSession() as session: + try: + logger.debug("Creating session and preparing request") + async with session.post( + self.api_base, + json=payload, + headers=headers, + ssl=True, + timeout=60, # Increased timeout + ) as response: + logger.debug("Request completed") + logger.debug(f"Response status: {response.status}") + logger.debug(f"Response headers: {dict(response.headers)}") + + if response.status == 401: + logger.error("Authentication failed. Please check your Modal credentials") + raise Exception("Authentication failed. Please check your Modal credentials") + + response.raise_for_status() + + # Read the response + full_response = await response.text() + logger.debug(f"Full response: {full_response}") + + try: + # Try to parse the response as JSON + response_data = json.loads(full_response) + logger.debug(f"Parsed JSON response: {response_data}") + + # Transform the response to the correct format + transformed_data = transform_to_correct_format(full_response) + if transformed_data: + response_data = transformed_data + + # Extract the response text + if isinstance(response_data, dict): + response_text = response_data.get("response", "") + # Check for tool calls in the response + if "tool_calls" in response_data: + yield { + "text": "", + "is_finished": True, + "finish_reason": "tool_calls", + "tool_calls": response_data["tool_calls"], + "usage": { + "prompt_tokens": len(prompt.split()), + "completion_tokens": 0, + "total_tokens": len(prompt.split()) + } + } + return + else: + response_text = str(response_data) + except json.JSONDecodeError: + # If not JSON, treat as raw text + logger.debug("Response is not 
JSON, treating as raw text") + if "assistant" in full_response.lower(): + response_text = full_response.split("assistant", 1)[1].strip() + else: + response_text = full_response.strip() + + # Remove any remaining user prompt + if "user" in response_text.lower(): + response_text = response_text.split("user", 1)[0].strip() + + if not response_text: + logger.error(f"Unexpected response format: {full_response}") + raise Exception("Unexpected response format from Modal API") + + # Calculate token usage (rough estimate) + prompt_tokens = len(prompt.split()) # Rough estimate + completion_tokens = len(response_text.split()) # Rough estimate + + # Yield the response in chunks + chunk_size = 10 # Adjust this value based on your needs + for i in range(0, len(response_text), chunk_size): + chunk = response_text[i:i + chunk_size] + is_last_chunk = i + chunk_size >= len(response_text) + # Format the response to match what LiteLLM expects + yield { + "text": chunk, + "is_finished": is_last_chunk, + "finish_reason": "stop" if is_last_chunk else None, + "usage": { + "prompt_tokens": prompt_tokens, + "completion_tokens": completion_tokens, + "total_tokens": prompt_tokens + completion_tokens + } + } + + except aiohttp.ClientError as e: + logger.error(f"Request failed: {str(e)}") + raise Exception(f"Error connecting to Modal API: {str(e)}") + + except Exception as e: + logger.error(f"Unexpected error: {str(e)}") + raise + +def transform_to_correct_format(response): + """Transform the response into the correct format.""" + if isinstance(response, str): + response = clean_response(response) + try: + # First try to parse as JSON + try: + data = json.loads(response) + except json.JSONDecodeError: + # If not valid JSON, check if it's a text response that should be a tool call + # Look for tool markers in the text + if "[@tool:" in response: + # Extract tool name and parameters + tool_match = re.search(r'\[@tool:(\w+)\](.*?)(?=\[@tool:|$)', response, re.DOTALL) + if tool_match: + tool_name = 
tool_match.group(1) + tool_text = tool_match.group(2).strip() + + # Try to extract parameters from the text + # Look for JSON-like structure in the text + json_match = re.search(r'\{.*\}', tool_text, re.DOTALL) + if json_match: + try: + params = json.loads(json_match.group(0)) + except json.JSONDecodeError: + # If not valid JSON, try to parse key-value pairs + params = {} + # Look for key: value or key=value patterns + kv_pairs = re.finditer(r'(\w+)\s*[:=]\s*([^,\n]+)', tool_text) + for match in kv_pairs: + key = match.group(1).strip() + value = match.group(2).strip() + # Try to convert value to appropriate type + try: + if value.lower() == 'true': + params[key] = True + elif value.lower() == 'false': + params[key] = False + elif value.isdigit(): + params[key] = int(value) + elif re.match(r'^-?\d*\.\d+$', value): + params[key] = float(value) + else: + params[key] = value + except ValueError: + params[key] = value + + # Only create tool call if we found parameters + if params: + return { + "role": "assistant", + "content": "", + "tool_calls": [{ + "function": { + "name": tool_name, + "arguments": json.dumps(params) + }, + "id": f"call_{uuid.uuid4().hex[:8]}", + "type": "function" + }], + "tool_call_id": None, + "tool_name": None, + "response_type": "internal" + } + return None + + # Handle legacy format with name/parameters + if "tool_calls" in data: + tool_calls = [] + for tool_call in data["tool_calls"]: + if "name" in tool_call and "parameters" in tool_call: + tool_calls.append({ + "function": { + "name": tool_call["name"], + "arguments": json.dumps(tool_call["parameters"]) + }, + "id": f"call_{uuid.uuid4().hex[:8]}", + "type": "function" + }) + return { + "role": "assistant", + "content": "", + "tool_calls": tool_calls, + "tool_call_id": None, + "tool_name": None, + "response_type": "internal" + } + + # If the response is already in the correct format, just fix arguments + if all(key in data for key in ["role", "content", "tool_calls", "tool_call_id", 
"tool_name", "response_type"]): + # Convert null content to empty string + if data["content"] is None: + data["content"] = "" + for tool_call in data["tool_calls"]: + if isinstance(tool_call["function"]["arguments"], dict): + tool_call["function"]["arguments"] = json.dumps(tool_call["function"]["arguments"]) + return data + + # If tool_calls is present and is a list, fix arguments if needed + if "tool_calls" in data and isinstance(data["tool_calls"], list): + transformed_tool_calls = [] + for tool_call in data["tool_calls"]: + if "function" in tool_call and isinstance(tool_call["function"], dict): + # Already in function format, just ensure arguments is a string + if isinstance(tool_call["function"].get("arguments"), dict): + tool_call["function"]["arguments"] = json.dumps(tool_call["function"]["arguments"]) + transformed_tool_calls.append(tool_call) + if transformed_tool_calls: # Only return if we found function format tool calls + return { + "role": "assistant", + "content": "", + "tool_calls": transformed_tool_calls, + "tool_call_id": None, + "tool_name": None, + "response_type": "internal" + } + + except Exception as e: + logger.error(f"Error transforming response: {str(e)}") + logger.error(f"Response data: {data if 'data' in locals() else 'No data'}") + return None + return None + +def clean_response(response): + """Clean the response by removing any markers.""" + # Remove any markers + markers = [ + ("<|python_start|>", "<|python_end|>"), + ("<|header_start|>", "<|header_end|>"), + ("<|im_start|>", "<|im_end|>"), + ("<|assistant|>", "<|user|>") + ] + for start, end in markers: + if start in response and end in response: + response = response.split(start)[1].split(end)[0] + return response.strip() + +class ModalLLMFactory: + def __init__(self): + self.instance = None + + def __call__(self, *args, **kwargs): + if self.instance is None: + self.instance = ModalLLM(*args, **kwargs) + return self.instance + + def completion(self, *args, **kwargs): + if 
self.instance is None: + self.instance = ModalLLM() + return self.instance.completion(*args, **kwargs) + + def acompletion(self, *args, **kwargs): + if self.instance is None: + self.instance = ModalLLM() + return self.instance.acompletion(*args, **kwargs) + + def astreaming(self, *args, **kwargs): + if self.instance is None: + self.instance = ModalLLM() + return self.instance.astreaming(*args, **kwargs) + +modal_llm = ModalLLMFactory() \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index f0384d747..66d53cbe6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,13 @@ version: '3.8' +volumes: + uploads: + driver: local + driver_opts: + type: none + o: bind + device: ./data/uploads + services: rowboat: build: @@ -22,9 +30,10 @@ services: - COPILOT_API_KEY=${COPILOT_API_KEY} - REDIS_URL=redis://redis:6379 - USE_RAG=${USE_RAG} - - QDRANT_URL=${QDRANT_URL} + - QDRANT_URL=http://qdrant:6333 - QDRANT_API_KEY=${QDRANT_API_KEY} - USE_RAG_UPLOADS=${USE_RAG_UPLOADS} + - USE_RAG_S3_UPLOADS=${USE_RAG_S3_UPLOADS} - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - RAG_UPLOADS_S3_BUCKET=${RAG_UPLOADS_S3_BUCKET} @@ -37,7 +46,11 @@ services: - MAX_QUERIES_PER_MINUTE=${MAX_QUERIES_PER_MINUTE} - MAX_PROJECTS_PER_USER=${MAX_PROJECTS_PER_USER} - VOICE_API_URL=${VOICE_API_URL} + - PROVIDER_DEFAULT_MODEL=${PROVIDER_DEFAULT_MODEL} + - RAG_UPLOADS_DIR=/app/uploads restart: unless-stopped + volumes: + - uploads:/app/uploads rowboat_agents: build: @@ -50,8 +63,13 @@ services: - API_KEY=${AGENTS_API_KEY} - REDIS_URL=redis://redis:6379 - MONGODB_URI=mongodb://mongo:27017/rowboat - - QDRANT_URL=${QDRANT_URL} + - QDRANT_URL=http://qdrant:6333 - QDRANT_API_KEY=${QDRANT_API_KEY} + - PROVIDER_BASE_URL=${PROVIDER_BASE_URL} + - PROVIDER_API_KEY=${PROVIDER_API_KEY} + - PROVIDER_DEFAULT_MODEL=${PROVIDER_DEFAULT_MODEL} + - MAX_CALLS_PER_CHILD_AGENT=${MAX_CALLS_PER_CHILD_AGENT} + - ENABLE_TRACING=${ENABLE_TRACING} 
restart: unless-stopped copilot: @@ -63,6 +81,10 @@ services: environment: - OPENAI_API_KEY=${OPENAI_API_KEY} - API_KEY=${COPILOT_API_KEY} + - PROVIDER_BASE_URL=${PROVIDER_BASE_URL} + - PROVIDER_API_KEY=${PROVIDER_API_KEY} + - PROVIDER_DEFAULT_MODEL=${PROVIDER_DEFAULT_MODEL} + - PROVIDER_COPILOT_MODEL=${PROVIDER_COPILOT_MODEL} restart: unless-stopped # tools_webhook: @@ -89,21 +111,28 @@ services: build: context: ./apps/rowboat dockerfile: scripts.Dockerfile - command: ["sh", "-c", "npm run setupQdrant && echo 'index created successfully'"] + command: ["sh", "-c", "npm run setupQdrant"] profiles: [ "setup_qdrant" ] + depends_on: + qdrant: + condition: service_healthy environment: - - QDRANT_URL=${QDRANT_URL} + - QDRANT_URL=http://qdrant:6333 - QDRANT_API_KEY=${QDRANT_API_KEY} + - EMBEDDING_VECTOR_SIZE=${EMBEDDING_VECTOR_SIZE} restart: no delete_qdrant: build: context: ./apps/rowboat dockerfile: scripts.Dockerfile - command: ["sh", "-c", "npm run deleteQdrant && echo 'index deleted successfully'"] + command: ["sh", "-c", "npm run deleteQdrant"] profiles: [ "delete_qdrant" ] + depends_on: + qdrant: + condition: service_healthy environment: - - QDRANT_URL=${QDRANT_URL} + - QDRANT_URL=http://qdrant:6333 - QDRANT_API_KEY=${QDRANT_API_KEY} restart: no @@ -115,15 +144,23 @@ services: profiles: [ "rag_files_worker" ] environment: - OPENAI_API_KEY=${OPENAI_API_KEY} + - EMBEDDING_PROVIDER_BASE_URL=${EMBEDDING_PROVIDER_BASE_URL} + - EMBEDDING_PROVIDER_API_KEY=${EMBEDDING_PROVIDER_API_KEY} + - EMBEDDING_MODEL=${EMBEDDING_MODEL} - MONGODB_CONNECTION_STRING=mongodb://mongo:27017/rowboat + - REDIS_URL=redis://redis:6379 - GOOGLE_API_KEY=${GOOGLE_API_KEY} - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} - RAG_UPLOADS_S3_BUCKET=${RAG_UPLOADS_S3_BUCKET} - RAG_UPLOADS_S3_REGION=${RAG_UPLOADS_S3_REGION} - - QDRANT_URL=${QDRANT_URL} + - QDRANT_URL=http://qdrant:6333 - QDRANT_API_KEY=${QDRANT_API_KEY} + - RAG_UPLOADS_DIR=/app/uploads + - 
USE_GEMINI_FILE_PARSING=${USE_GEMINI_FILE_PARSING} restart: unless-stopped + volumes: + - uploads:/app/uploads rag_urls_worker: build: @@ -133,9 +170,13 @@ services: profiles: [ "rag_urls_worker" ] environment: - OPENAI_API_KEY=${OPENAI_API_KEY} + - EMBEDDING_PROVIDER_BASE_URL=${EMBEDDING_PROVIDER_BASE_URL} + - EMBEDDING_PROVIDER_API_KEY=${EMBEDDING_PROVIDER_API_KEY} + - EMBEDDING_MODEL=${EMBEDDING_MODEL} - MONGODB_CONNECTION_STRING=mongodb://mongo:27017/rowboat + - REDIS_URL=redis://redis:6379 - FIRECRAWL_API_KEY=${FIRECRAWL_API_KEY} - - QDRANT_URL=${QDRANT_URL} + - QDRANT_URL=http://qdrant:6333 - QDRANT_API_KEY=${QDRANT_API_KEY} restart: unless-stopped @@ -147,8 +188,12 @@ services: profiles: [ "rag_text_worker" ] environment: - OPENAI_API_KEY=${OPENAI_API_KEY} + - EMBEDDING_PROVIDER_BASE_URL=${EMBEDDING_PROVIDER_BASE_URL} + - EMBEDDING_PROVIDER_API_KEY=${EMBEDDING_PROVIDER_API_KEY} + - EMBEDDING_MODEL=${EMBEDDING_MODEL} - MONGODB_CONNECTION_STRING=mongodb://mongo:27017/rowboat - - QDRANT_URL=${QDRANT_URL} + - REDIS_URL=redis://redis:6379 + - QDRANT_URL=http://qdrant:6333 - QDRANT_API_KEY=${QDRANT_API_KEY} restart: unless-stopped @@ -199,3 +244,21 @@ services: # - ROWBOAT_API_HOST=http://rowboat:3000 # - MONGODB_URI=mongodb://mongo:27017/rowboat # restart: unless-stopped + + qdrant: + build: + context: . 
+ dockerfile: Dockerfile.qdrant + ports: + - "6333:6333" + environment: + - QDRANT__STORAGE__STORAGE_PATH=/data/qdrant + restart: unless-stopped + profiles: [ "qdrant" ] + volumes: + - ./data/qdrant:/data/qdrant + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:6333/healthz"] + interval: 5s + timeout: 10s + retries: 3 diff --git a/k8s/arm64/app-config.yaml b/k8s/arm64/app-config.yaml index fa25956d5..157c00403 100644 --- a/k8s/arm64/app-config.yaml +++ b/k8s/arm64/app-config.yaml @@ -5,10 +5,10 @@ metadata: data: REDIS_URL: "redis://redis:6379" MONGODB_CONNECTION_STRING: "mongodb://admin:password@mongodb:27017/rowboat?authSource=admin" - AUTH0_DOMAIN: "" - AUTH0_AUDIENCE: "" - AUTH0_BASE_URL: "" - AUTH0_ISSUER_BASE_URL: "" + AUTH0_DOMAIN: "dev-8yopv4xu0lclcsv3.us.auth0.com" + AUTH0_AUDIENCE: "https://dev-8yopv4xu0lclcsv3.us.auth0.com/api/v2/" + AUTH0_BASE_URL: "https://k3s.sjfisher.com" + AUTH0_ISSUER_BASE_URL: "https://dev-8yopv4xu0lclcsv3.us.auth0.com" USE_AUTH: "false" USE_RAG: "true" AGENTS_API_URL: "http://rowboat-agents:3001" @@ -20,8 +20,8 @@ data: MAX_QUERIES_PER_MINUTE: "60" MAX_PROJECTS_PER_USER: "10" VOICE_API_URL: "" - PROVIDER_DEFAULT_MODEL: "gpt-4" - PROVIDER_COPILOT_MODEL: "gpt-4" - PROVIDER_BASE_URL: "" + PROVIDER_DEFAULT_MODEL: "groq/llama-3.3-70b-versatile" + PROVIDER_COPILOT_MODEL: "gpt-4.1" + PROVIDER_BASE_URL: "http://litellm-proxy:4000/v1" MAX_CALLS_PER_CHILD_AGENT: "10" - ENABLE_TRACING: "false" + ENABLE_TRACING: "false" \ No newline at end of file diff --git a/k8s/arm64/app-secrets.yaml b/k8s/arm64/app-secrets.yaml index eaffd532a..cd650bd1b 100644 --- a/k8s/arm64/app-secrets.yaml +++ b/k8s/arm64/app-secrets.yaml @@ -10,12 +10,12 @@ stringData: # 3. 
Generate a random secret: Run this command in your terminal: # openssl rand -hex 32 AUTH0_CLIENT_ID: "" - AUTH0_CLIENT_SECRET: "-E3" + AUTH0_CLIENT_SECRET: "" AUTH0_SECRET: "" # API Keys and sensitive configuration - OPENAI_API_KEY: "" # Add your OpenAI API key - AGENTS_API_KEY: "" # Add your agents API key - COPILOT_API_KEY: "" # Add your copilot API key + OPENAI_API_KEY: "sk-z" # Add your OpenAI API key + AGENTS_API_KEY: "s" # Add your agents API key + COPILOT_API_KEY: "sk-proj-" # Add your copilot OpenAI API key QDRANT_URL: "http://localhost:6333" # Add your Qdrant URL QDRANT_API_KEY: "" # Add your Qdrant API key AWS_ACCESS_KEY_ID: "" # Add your AWS access key @@ -24,5 +24,5 @@ stringData: RAG_UPLOADS_S3_REGION: "" # Add your S3 region FIRECRAWL_API_KEY: "" # Add your Firecrawl API key CHAT_WIDGET_SESSION_JWT_SECRET: "" # Add your JWT secret - PROVIDER_API_KEY: "" # Add your provider API key - # Removed AUTH0_BASE_URL and AUTH0_ISSUER_BASE_URL as they are non-sensitive and already in app-config.yaml + PROVIDER_API_KEY: "sk-" # Add your LiteLLM key here + # Removed AUTH0_BASE_URL and AUTH0_ISSUER_BASE_URL as they are non-sensitive and already in app-config.yaml diff --git a/k8s/arm64/apply.sh b/k8s/arm64/apply.sh deleted file mode 100644 index f1ee57f28..000000000 --- a/k8s/arm64/apply.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/bash - -# Apply all configurations -kubectl apply -f app-config.yaml -kubectl apply -f app-secrets.yaml -kubectl apply -f openai-secret.yaml -kubectl apply -f copilot-deployment.yaml -kubectl apply -f mongodb-vpc.yaml -kubectl apply -f mongodb-deployment.yaml -kubectl apply -f redis-deployment.yaml -kubectl apply -f rowboat-agents-go-deployment.yaml -kubectl apply -f rowboat-deployment.yaml - -# Wait for 2 seconds before starting restarts -sleep 2 - -# Restart deployments with delays between each -echo "Restarting redis deployment..." -kubectl rollout restart deployment/redis -sleep 2 - -echo "Restarting rowboat-agents deployment..." 
-kubectl rollout restart deployment/rowboat-agents -sleep 2 - -echo "Restarting copilot deployment..." -kubectl rollout restart deployment/copilot -sleep 2 - -echo "Restarting rowboat deployment..." -kubectl rollout restart deployment/rowboat \ No newline at end of file diff --git a/k8s/arm64/copilot-deployment.yaml b/k8s/arm64/copilot-deployment.yaml index 00c69e025..ce837feb3 100644 --- a/k8s/arm64/copilot-deployment.yaml +++ b/k8s/arm64/copilot-deployment.yaml @@ -40,10 +40,7 @@ spec: name: app-secrets key: COPILOT_API_KEY - name: PROVIDER_BASE_URL - valueFrom: - configMapKeyRef: - name: app-config - key: PROVIDER_BASE_URL + value: "http://litellm-proxy:4000/v1" - name: PROVIDER_API_KEY valueFrom: secretKeyRef: @@ -59,6 +56,8 @@ spec: configMapKeyRef: name: app-config key: PROVIDER_COPILOT_MODEL + - name: LITELLM_API_KEY + value: "Bearer sk-ZpzWhDlT-GZIqhnW8JcqHw" resources: requests: cpu: "500m" diff --git a/k8s/arm64/openai-secret.yaml b/k8s/arm64/openai-secret.yaml index 537f858a1..29d9b2d17 100644 --- a/k8s/arm64/openai-secret.yaml +++ b/k8s/arm64/openai-secret.yaml @@ -4,4 +4,4 @@ metadata: name: openai-secret type: Opaque data: - api-key: # Base 64 OpenAPI key + api-key: #Base 64 OpenAI Key diff --git a/k8s/arm64/rowboat-agents-deployment.yaml b/k8s/arm64/rowboat-agents-deployment.yaml index a0124fbbb..938ffbefc 100644 --- a/k8s/arm64/rowboat-agents-deployment.yaml +++ b/k8s/arm64/rowboat-agents-deployment.yaml @@ -25,7 +25,8 @@ spec: - "true" containers: - name: rowboat-agents - image: stevef1uk/rowboat-agents:arm64 + image: stevef1uk/rowboat_agents:arm64 + imagePullPolicy: Always ports: - containerPort: 3001 env: @@ -39,10 +40,7 @@ spec: name: app-secrets key: OPENAI_API_KEY - name: PROVIDER_BASE_URL - valueFrom: - configMapKeyRef: - name: app-config - key: PROVIDER_BASE_URL + value: "http://litellm-proxy:4000/v1" - name: PROVIDER_API_KEY valueFrom: secretKeyRef: @@ -53,6 +51,25 @@ spec: configMapKeyRef: name: app-config key: PROVIDER_DEFAULT_MODEL + - 
name: PROVIDER_COPILOT_MODEL + valueFrom: + configMapKeyRef: + name: app-config + key: PROVIDER_COPILOT_MODEL + - name: MAX_CALLS_PER_CHILD_AGENT + valueFrom: + configMapKeyRef: + name: app-config + key: MAX_CALLS_PER_CHILD_AGENT + - name: ENABLE_TRACING + valueFrom: + configMapKeyRef: + name: app-config + key: ENABLE_TRACING + - name: MCP_SERVER_URL + value: "http://mcp-api-service:8000/sse" + - name: LITELLM_API_KEY + value: "Bearer sk-ZpzWhDlT-GZIqhnW8JcqHw" resources: requests: memory: "512Mi" @@ -71,4 +88,4 @@ spec: ports: - port: 3001 targetPort: 3001 - type: ClusterIP \ No newline at end of file + type: ClusterIP diff --git a/k8s/arm64/rowboat-agents-go-deployment.yaml b/k8s/arm64/rowboat-agents-go-deployment.yaml index 2795d9ffe..8e65dbcc6 100644 --- a/k8s/arm64/rowboat-agents-go-deployment.yaml +++ b/k8s/arm64/rowboat-agents-go-deployment.yaml @@ -66,7 +66,7 @@ spec: "type": "conversation", "description": "An example agent", "instructions": "## šŸ§‘ā€ Role:\nYou are an helpful customer support assistant\n\n---\n## āš™ļø Steps to Follow:\n1. Ask the user what they would like help with\n2. 
Ask the user for their email address and let them know someone will contact them soon.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Asking the user their issue\n- Getting their email\n\nāŒ Out of Scope:\n- Questions unrelated to customer support\n- If a question is out of scope, politely inform the user and avoid providing an answer.\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- ask user their issue\n\nāŒ Don'ts:\n- don't ask user any other detail than email", - "model": "gpt-4o-2024-08-06", + "model": "${PROVIDER_DEFAULT_MODEL}", "controlType": "retain", "tools": [] }] diff --git a/k8s/arm64/rowboat_agents-deployment.yaml b/k8s/arm64/rowboat_agents-deployment.yaml index fe5075d1c..f6ace00de 100644 --- a/k8s/arm64/rowboat_agents-deployment.yaml +++ b/k8s/arm64/rowboat_agents-deployment.yaml @@ -26,64 +26,41 @@ spec: containers: - name: rowboat-agents image: stevef1uk/rowboat_agents:arm64 + imagePullPolicy: Always ports: - containerPort: 3001 env: + - name: NODE_ENV + value: "production" - name: PORT value: "3001" - - name: MONGODB_CONNECTION_STRING - value: "mongodb://admin:password@mongodb:27017/rowboat?authSource=admin" - - name: REDIS_URL - value: "redis://redis:6379" - - name: AUTH0_DOMAIN + - name: OPENAI_API_KEY valueFrom: - configMapKeyRef: - name: app-config - key: AUTH0_DOMAIN - - name: AUTH0_AUDIENCE + secretKeyRef: + name: app-secrets + key: OPENAI_API_KEY + - name: PROVIDER_BASE_URL valueFrom: configMapKeyRef: name: app-config - key: AUTH0_AUDIENCE - - name: AUTH0_BASE_URL + key: PROVIDER_BASE_URL + - name: PROVIDER_API_KEY valueFrom: - configMapKeyRef: - name: app-config - key: AUTH0_BASE_URL - - name: AUTH0_ISSUER_BASE_URL + secretKeyRef: + name: app-secrets + key: PROVIDER_API_KEY + - name: PROVIDER_DEFAULT_MODEL valueFrom: configMapKeyRef: name: app-config - key: AUTH0_ISSUER_BASE_URL - - name: OPENAI_API_KEY - valueFrom: - secretKeyRef: - name: openai-secret - key: api-key - - name: ENABLE_AGENT_SWITCHING - value: "false" - - name: DEFAULT_AGENT - 
value: "Example Agent" - - name: AGENT_CONFIGS - value: | - [{ - "name": "Example Agent", - "type": "conversation", - "description": "An example agent", - "instructions": "## šŸ§‘ā€ Role:\nYou are an helpful customer support assistant\n\n---\n## āš™ļø Steps to Follow:\n1. Ask the user what they would like help with\n2. Ask the user for their email address and let them know someone will contact them soon.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Asking the user their issue\n- Getting their email\n\nāŒ Out of Scope:\n- Questions unrelated to customer support\n- If a question is out of scope, politely inform the user and avoid providing an answer.\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- ask user their issue\n\nāŒ Don'ts:\n- don't ask user any other detail than email", - "model": "gpt-4o-2024-08-06", - "controlType": "retain", - "tools": [] - }] - - name: AGENT_CONNECTIONS - value: '{}' + key: PROVIDER_DEFAULT_MODEL resources: - limits: - cpu: "1" - memory: "1Gi" requests: - cpu: "500m" memory: "512Mi" + cpu: "500m" + limits: + memory: "1Gi" + cpu: "1000m" --- apiVersion: v1 kind: Service @@ -95,4 +72,4 @@ spec: ports: - port: 3001 targetPort: 3001 - type: ClusterIP \ No newline at end of file + type: ClusterIP diff --git a/k8s/arm64/start_up.sh b/k8s/arm64/start_up.sh index f79696839..825f0b0c5 100755 --- a/k8s/arm64/start_up.sh +++ b/k8s/arm64/start_up.sh @@ -2,10 +2,10 @@ kubectl apply -f app-config.yaml kubectl apply -f app-secrets.yaml kubectl apply -f openai-secret.yaml kubectl apply -f copilot-deployment.yaml -kubectl apply -f mongodb-vpc.yaml -kubectl apply -f mongodb-deployment.yaml +#kubectl apply -f mongodb-vpc.yaml +#kubectl apply -f mongodb-deployment.yaml kubectl apply -f redis-deployment.yaml #kubectl apply -f rowboat-agents-go-deployment.yaml -kubectl apply -f rowboat_agents-deployment.yaml +kubectl apply -f rowboat-agents-deployment.yaml sleep 4 kubectl apply -f rowboat-deployment.yaml diff --git a/litellm/Dockerfile b/litellm/Dockerfile new 
file mode 100644 index 000000000..7c2f15ac2 --- /dev/null +++ b/litellm/Dockerfile @@ -0,0 +1,34 @@ +# Use Python 3.11 slim image for ARM64 +FROM --platform=linux/arm64 python:3.11-slim + +# Set working directory +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + build-essential \ + && rm -rf /var/lib/apt/lists/* + +# Install Python packages +RUN pip install --no-cache-dir litellm[proxy] prisma + +# Create directory for custom handler +RUN mkdir -p /app/custom_handlers + +# Copy custom handler +COPY handler/custom_handler.py /app/custom_handlers/ +RUN touch /app/custom_handlers/__init__.py + +# Copy config and schema +COPY config.yaml /app/ +COPY schema.prisma /app/ + +# Set environment variables +ENV PYTHONPATH=/app +ENV PYTHONUNBUFFERED=1 + +# Expose the port +EXPOSE 4000 + +# Run LiteLLM +CMD ["litellm", "--config", "/app/config.yaml"] \ No newline at end of file diff --git a/litellm/README.md b/litellm/README.md new file mode 100644 index 000000000..3d0682898 --- /dev/null +++ b/litellm/README.md @@ -0,0 +1,539 @@ +# LiteLLM Modal Handler + +This project provides integration between LiteLLM and Modal's Ollama server, enabling seamless use of various LLM models through Modal's infrastructure. + +## Project Structure + +The custom handler should be placed in your LiteLLM project directory as follows: + +``` +your-litellm-project/ +ā”œā”€ā”€ litellm/ +│ ā”œā”€ā”€ custom_handler.py # Place the Modal handler here +│ └── __init__.py +ā”œā”€ā”€ config.yaml +└── requirements.txt +``` + +To set this up: + +1. Create the `litellm` directory if it doesn't exist: +```bash +mkdir -p litellm +``` + +2. Copy the custom handler file: +```bash +cp handler/custom_handler.py litellm/ +``` + +3. Create an empty `__init__.py` file: +```bash +touch litellm/__init__.py +``` + +This structure ensures that the custom handler is properly recognized by LiteLLM when referenced in the config.yaml as `litellm.custom_handler.modal_llm`. 
+ +## Features + +- Integration with Modal's Ollama server +- Support for multiple LLM models including: + - Llama4 (new!) + - Gemma3 + - Other Ollama-compatible models +- Asynchronous and streaming support +- Tool calling capabilities +- Comprehensive error handling and logging +- Support for both synchronous and asynchronous operations + +## Dependencies + +The handler requires the following Python packages: + +```bash +pip install litellm requests aiohttp python-dotenv typing-extensions asyncio uuid +``` + +## Environment Variables + +The handler requires the following environment variables to be set: + +```bash +export TOKEN_ID="your_modal_token_id" +export TOKEN_SECRET="your_modal_token_secret" +``` + +## Installation + +1. Clone the repository: +```bash +git clone https://github.com/stevef1uk/modal_ollama_gemma3_gradio.git +cd modal_ollama_gemma3_gradio +``` + +2. Install dependencies: +```bash +pip install -r requirements.txt +``` + +3. Set up Modal and Ollama: +Follow the instructions in the [Modal Ollama Setup Guide](https://github.com/stevef1uk/modal_ollama_gemma3_gradio) to: +- Set up a Modal account +- Deploy the Ollama server +- Install the Llama4 model + +## Configuration + +### Customizing the Modal API Base URL + +By default, the handler uses a specific Modal API base URL. To use your own Modal deployment, you can customize the API base URL in two ways: + +1. When initializing the handler: +```python +from litellm.custom_handler import modal_llm + +# Initialize with custom API base +modal_llm = modal_llm(api_base="https://your-username--ollama-api-api.modal.run") +``` + +2. Or by setting the environment variable: +```bash +export MODAL_API_BASE="https://your-username--ollama-api-api.modal.run" +``` + +Replace `your-username` with your Modal username. 
The URL format should be: +`https://{username}--ollama-api-api.modal.run` + +### Configuring in LiteLLM config.yaml + +To use the Modal handler with LiteLLM's configuration system, add the following to your `config.yaml`: + +```yaml +# Model configuration +model_list: + - model_name: "modal/llama4" + litellm_params: + model: "modal/llama4" + api_base: "https://your-username--ollama-api-api.modal.run" + provider: "modal" + +# Optional: Set default model for unspecified requests +default_model: llama4 + +# Server configurations +server: + port: 4000 + host: 0.0.0.0 + +# General settings for pass-through endpoints +general_settings: + pass_through_endpoints: + - path: "/modal/tensorrt" + target: "https://your-username--ollama-api-api.modal.run" + headers: + Modal-Key: os.environ/TOKEN_ID + Modal-Secret: os.environ/TOKEN_SECRET + content-type: application/json + accept: application/json + forward_headers: true # Forward all headers from the incoming request + +# Register custom provider +litellm_settings: + custom_provider_map: + - provider: "modal" + custom_handler: "litellm.custom_handler.modal_llm" +``` + +Then you can use it with LiteLLM like this: + +```python +from litellm import completion + +# Using Llama4 +response = completion( + model="modal/llama4", + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "What is the capital of France?"} + ] +) +``` + +The configuration supports all features including: +- Tool calling +- Streaming +- Async completion +- Custom parameters (temperature, max_tokens, etc.) + +Key points about the configuration: +1. The `provider` field must be set to "modal" +2. The `custom_provider_map` links the "modal" provider to our custom handler +3. Environment variables `TOKEN_ID` and `TOKEN_SECRET` are used for authentication +4. 
The pass-through endpoint configuration allows direct access to the Modal API + +## Usage + +### Basic Completion + +```python +from litellm.custom_handler import modal_llm + +# Basic completion +response = modal_llm.completion( + model="modal/llama4", # Use Llama4 model + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "What is the capital of France?"} + ], + temperature=0.7 +) +``` + +### Async Completion + +```python +async def get_completion(): + response = await modal_llm.acompletion( + model="modal/llama4", + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "What is the capital of France?"} + ], + temperature=0.7 + ) + return response +``` + +### Streaming + +```python +async def stream_completion(): + async for chunk in modal_llm.astreaming( + model="modal/llama4", + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "What is the capital of France?"} + ], + temperature=0.7 + ): + print(chunk) +``` + +### Tool Calling + +```python +tools = [ + { + "function": { + "name": "get_weather", + "description": "Get the current weather in a given location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. San Francisco, CA" + } + }, + "required": ["location"] + } + } + } +] + +response = modal_llm.completion( + model="modal/llama4", + messages=[ + {"role": "system", "content": "You are a helpful assistant that uses tools when available."}, + {"role": "user", "content": "What's the weather like in London?"} + ], + tools=tools, + temperature=0.7 +) +``` + +## Testing + +The handler includes a comprehensive test suite. 
To run the tests: + +```bash +python test_modal_handler.py +``` + +The test suite includes: +- Structured tool calling tests +- Response format validation +- Regular completion tests +- Async completion tests +- Streaming tests + +### Example Test Outputs + +Here's what you can expect when running the tests: + +#### 1. Structured Tool Calling Test +```json +{ + "id": "modal-response", + "choices": [ + { + "message": { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "function": { + "name": "get_weather", + "arguments": "{\"location\": \"London\"}" + }, + "id": "call_123", + "type": "function" + } + ] + }, + "finish_reason": "tool_calls" + } + ], + "created": 0, + "model": "modal/llama4", + "usage": {} +} +``` + +#### 2. Regular Completion Test +```json +{ + "id": "modal-response", + "choices": [ + { + "message": { + "role": "assistant", + "content": "The capital of France is Paris." + }, + "finish_reason": "stop" + } + ], + "created": 0, + "model": "modal/llama4", + "usage": {} +} +``` + +#### 3. Async Completion Test +```json +{ + "id": "modal-response", + "choices": [ + { + "message": { + "role": "assistant", + "content": "The capital of France is Paris. I can confirm this using my available information. Would you like to know more about Paris or France in general?" + }, + "finish_reason": "stop" + } + ], + "created": 0, + "model": "modal/llama4", + "usage": {} +} +``` + +#### 4. 
Streaming Test +```json +// First chunk +{ + "text": "The capita", + "is_finished": false, + "finish_reason": null, + "usage": { + "prompt_tokens": 34, + "completion_tokens": 6, + "total_tokens": 40 + } +} + +// Middle chunk +{ + "text": "l of Franc", + "is_finished": false, + "finish_reason": null, + "usage": { + "prompt_tokens": 34, + "completion_tokens": 6, + "total_tokens": 40 + } +} + +// Final chunk +{ + "text": "e is Paris.", + "is_finished": true, + "finish_reason": "stop", + "usage": { + "prompt_tokens": 34, + "completion_tokens": 6, + "total_tokens": 40 + } +} +``` + +## Running in Kubernetes + +To run the LiteLLM proxy in a Kubernetes cluster, you'll need to set up the following environment variables and configuration. + +### Environment Variables + +```bash +# API Keys +export OPENAI_API_KEY="your-openai-api-key" +export GROQ_API_KEY="your-groq-api-key" +export LITELLM_MASTER_KEY="your-litellm-master-key" +export LITELLM_SALT_KEY="your-litellm-salt-key" + +# Database Configuration +# For local development: +export DATABASE_URL="postgres://username:password@db:5432/database_name" +# For Kubernetes (adjust based on your cluster's PostgreSQL service): +export DATABASE_URL="postgres://username:password@postgres-service:5432/database_name" +``` + +### Running the Proxy + +To start the LiteLLM proxy with your Modal configuration: + +```bash +litellm --config config.yaml +``` + +### Kubernetes Deployment + +Here's an example Kubernetes deployment configuration: + +```yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: litellm-proxy +spec: + replicas: 1 + selector: + matchLabels: + app: litellm-proxy + template: + metadata: + labels: + app: litellm-proxy + spec: + containers: + - name: litellm-proxy + image: your-litellm-image:tag + env: + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: litellm-secrets + key: openai-api-key + - name: GROQ_API_KEY + valueFrom: + secretKeyRef: + name: litellm-secrets + key: groq-api-key + - name: 
LITELLM_MASTER_KEY + valueFrom: + secretKeyRef: + name: litellm-secrets + key: litellm-master-key + - name: LITELLM_SALT_KEY + valueFrom: + secretKeyRef: + name: litellm-secrets + key: litellm-salt-key + - name: DATABASE_URL + valueFrom: + secretKeyRef: + name: litellm-secrets + key: database-url + - name: TOKEN_ID + valueFrom: + secretKeyRef: + name: modal-secrets + key: token-id + - name: TOKEN_SECRET + valueFrom: + secretKeyRef: + name: modal-secrets + key: token-secret + ports: + - containerPort: 4000 + command: ["litellm"] + args: ["--config", "/app/config.yaml"] + volumeMounts: + - name: config-volume + mountPath: /app/config.yaml + subPath: config.yaml + volumes: + - name: config-volume + configMap: + name: litellm-config +--- +apiVersion: v1 +kind: Service +metadata: + name: litellm-proxy +spec: + selector: + app: litellm-proxy + ports: + - port: 4000 + targetPort: 4000 + type: ClusterIP +``` + +### Kubernetes Secrets + +Create the necessary secrets for your deployment: + +```bash +# Create secrets for LiteLLM +kubectl create secret generic litellm-secrets \ + --from-literal=openai-api-key='your-openai-api-key' \ + --from-literal=groq-api-key='your-groq-api-key' \ + --from-literal=litellm-master-key='your-litellm-master-key' \ + --from-literal=litellm-salt-key='your-litellm-salt-key' \ + --from-literal=database-url='postgres://username:password@postgres-service:5432/database_name' + +# Create secrets for Modal +kubectl create secret generic modal-secrets \ + --from-literal=token-id='your-modal-token-id' \ + --from-literal=token-secret='your-modal-token-secret' +``` + +### ConfigMap + +Create a ConfigMap for your config.yaml: + +```bash +kubectl create configmap litellm-config --from-file=config.yaml +``` + +### Important Notes + +1. Update the PostgreSQL connection string to point to your cluster's PostgreSQL service +2. Store sensitive information in Kubernetes secrets +3. Ensure your cluster has access to the Modal API endpoint +4. 
Consider using a service mesh or ingress controller for external access +5. Monitor the proxy's logs and metrics in your cluster + +## Related Resources + +- [Modal Ollama Setup Guide](https://github.com/stevef1uk/modal_ollama_gemma3_gradio) - Instructions for setting up Ollama and Llama4 on Modal +- [LiteLLM Documentation](https://docs.litellm.ai/) - Complete documentation for LiteLLM +- [Modal Documentation](https://modal.com/docs) - Modal's official documentation + +## Contributing + +Contributions are welcome! Please feel free to submit a Pull Request. + +## License + +This project is licensed under the MIT License - see the LICENSE file for details. diff --git a/litellm/build.sh b/litellm/build.sh new file mode 100755 index 000000000..9c9b4faac --- /dev/null +++ b/litellm/build.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# Set variables +IMAGE_NAME="litellm-proxy" +VERSION="1.0.0" +REGISTRY="stevef1uk" + +# Build the image for ARM64 +echo "Building image for ARM64..." +docker build --platform linux/arm64 -t ${REGISTRY}/${IMAGE_NAME}:${VERSION} . + +# Try to push the image +echo "Pushing image to Docker Hub..." +if ! docker push ${REGISTRY}/${IMAGE_NAME}:${VERSION}; then + echo "Failed to push image. Please make sure you're logged in to Docker Hub:" + echo "docker login" + exit 1 +fi + +# Also tag as latest +echo "Tagging as latest..." +docker tag ${REGISTRY}/${IMAGE_NAME}:${VERSION} ${REGISTRY}/${IMAGE_NAME}:latest +docker push ${REGISTRY}/${IMAGE_NAME}:latest + +echo "Done!" 
\ No newline at end of file diff --git a/litellm/config.yaml b/litellm/config.yaml new file mode 100644 index 000000000..662c2ab53 --- /dev/null +++ b/litellm/config.yaml @@ -0,0 +1,70 @@ +model_list: + - model_name: "gpt-4.1" + litellm_params: + model: "gpt-4.1-2025-04-14" + api_key: os.environ/OPENAI_API_KEY + + - model_name: "gpt-4o" + litellm_params: + model: "gpt-4o-mini-2024-07-18" + api_key: os.environ/OPENAI_API_KEY + + - model_name: "gpt-3.5-turbo" + litellm_params: + model: "gpt-3.5-turbo" + api_key: os.environ/OPENAI_API_KEY + + - model_name: ollama/gemma3:12b + litellm_params: + model: ollama/gemma3:12b + api_base: http://192.168.1.53:11434 + + - model_name: ollama/DeepSeek-R1:14b + litellm_params: + model: ollama/DeepSeek-R1:14b + api_base: http://192.168.1.53:11434 + + - model_name: "modal/llama3.3" + litellm_params: + model: "modal/llama3.3:70b" + api_base: "https://stevef1uk--ollama-api-api.modal.run" + provider: "modal" + + - model_name: "modal/llama4" + litellm_params: + model: "modal/llama4" + api_base: "https://stevef1uk--ollama-api-api.modal.run" + provider: "modal" + + - model_name: groq-llama-3.3-70b-versatile + litellm_params: + model: groq/llama-3.3-70b-versatile + api_key: "os.environ/GROQ_API_KEY" + +# Optional: Set default model for unspecified requests +default_model: gpt-4o + +proxy_server_requires_api_key: false + +# Server configurations +server: + port: 4000 + host: 0.0.0.0 + +# General settings for pass-through endpoints +general_settings: + pass_through_endpoints: + - path: "/modal/tensorrt" + target: "https://stevef1uk--ollama-api-api.modal.run" + headers: + Modal-Key: os.environ/TOKEN_ID + Modal-Secret: os.environ/TOKEN_SECRET + content-type: application/json + accept: application/json + forward_headers: true # Forward all headers from the incoming request + +# Register custom provider +litellm_settings: + custom_provider_map: + - provider: "modal" + custom_handler: "custom_handlers.custom_handler.modal_llm" diff --git 
a/litellm/k8s/configmap.yaml b/litellm/k8s/configmap.yaml new file mode 100644 index 000000000..1d564d9f2 --- /dev/null +++ b/litellm/k8s/configmap.yaml @@ -0,0 +1,80 @@ +apiVersion: v1 +kind: ConfigMap +metadata: + name: litellm-config +data: + config.yaml: | + model_list: + - model_name: "gpt-4.1" + litellm_params: + model: "gpt-4.1-2025-04-14" + api_key: "os.environ/OPENAI_API_KEY" + timeout: 30000 + + - model_name: "gpt-4o" + litellm_params: + model: "gpt-4o" + api_key: "os.environ/OPENAI_API_KEY" + timeout: 30000 + + - model_name: "gpt-3.5-turbo" + litellm_params: + model: "gpt-3.5-turbo" + api_key: "os.environ/OPENAI_API_KEY" + timeout: 30000 + + - model_name: "modal/llama4" + litellm_params: + model: "modal/llama4" + api_base: "https://stevef1uk--ollama-api-api.modal.run" + provider: "modal" + + - model_name: "modal/llama3.3" + litellm_params: + model: "modal/llama3.3:70b" + api_base: "https://stevef1uk--ollama-api-api.modal.run" + provider: "modal" + + - model_name: "groq-llama-3.3-70b-versatile" + litellm_params: + model: "groq/llama-3.3-70b-versatile" + api_key: "os.environ/GROQ_API_KEY" + + # Optional: Set default model for unspecified requests + default_model: gpt-4o + + # Server configurations + server: + port: 4000 + host: 0.0.0.0 + + # Authentication settings + proxy_server_requires_api_key: false + ui_username: "admin" + ui_password: "admin" + skip_auth_paths: ["/health"] + master_key: "os.environ/LITELLM_MASTER_KEY" + + # General settings for pass-through endpoints + general_settings: + pass_through_endpoints: + - path: "/modal/tensorrt" + target: "https://stevef1uk--ollama-api-api.modal.run" + headers: + Modal-Key: "os.environ/TOKEN_ID" + Modal-Secret: "os.environ/TOKEN_SECRET" + content-type: application/json + accept: application/json + forward_headers: true + + # Register custom provider + litellm_settings: + custom_provider_map: + - provider: "modal" + custom_handler: "custom_handlers.custom_handler.ModalLLM" + debug: true + timeout: 30000 + 
track_cost: false + success_callback: [] + failure_callback: [] + cost_callback: [] \ No newline at end of file diff --git a/litellm/k8s/deployment.yaml b/litellm/k8s/deployment.yaml new file mode 100644 index 000000000..08772b152 --- /dev/null +++ b/litellm/k8s/deployment.yaml @@ -0,0 +1,123 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: litellm-proxy + labels: + app: litellm-proxy +spec: + replicas: 1 + selector: + matchLabels: + app: litellm-proxy + template: + metadata: + labels: + app: litellm-proxy + spec: + containers: + - name: litellm-proxy + image: stevef1uk/litellm-proxy:latest + env: + # Modal credentials + - name: TOKEN_ID + valueFrom: + secretKeyRef: + name: litellm-secrets + key: token-id + - name: TOKEN_SECRET + valueFrom: + secretKeyRef: + name: litellm-secrets + key: token-secret + # LiteLLM specific secrets + - name: OPENAI_API_KEY + valueFrom: + secretKeyRef: + name: litellm-secrets + key: openai-api-key + - name: GROQ_API_KEY + valueFrom: + secretKeyRef: + name: litellm-secrets + key: groq-api-key + - name: LITELLM_MASTER_KEY + valueFrom: + secretKeyRef: + name: litellm-secrets + key: litellm-master-key + - name: LITELLM_SALT_KEY + valueFrom: + secretKeyRef: + name: litellm-secrets + key: litellm-salt-key + # UI credentials + - name: UI_USERNAME + value: "admin" + - name: UI_PASSWORD + valueFrom: + secretKeyRef: + name: litellm-secrets + key: ui-password + # Database configuration + - name: DATABASE_URL + valueFrom: + secretKeyRef: + name: litellm-secrets + key: database-url + # Additional settings + - name: LITELLM_VERIFY_TOKEN + value: "false" + - name: LITELLM_TOKEN_DB_SYNC + value: "false" + - name: LITELLM_UI_PASSWORD + valueFrom: + secretKeyRef: + name: litellm-secrets + key: ui-password + - name: LITELLM_UI_USERNAME + value: "admin" + - name: PRISMA_CLIENT_CONNECTION_TIMEOUT + value: "30000" + - name: PRISMA_CLIENT_QUERY_TIMEOUT + value: "30000" + # OpenAI specific settings + - name: OPENAI_API_BASE + value: 
"https://api.openai.com/v1" + - name: OPENAI_API_VERSION + value: "2024-02-15-preview" + - name: OPENAI_API_TYPE + value: "openai" + - name: OPENAI_API_TIMEOUT + value: "30000" + - name: LITELLM_TIMEOUT + value: "30000" + - name: LITELLM_DEBUG + value: "true" + volumeMounts: + - name: config-volume + mountPath: /app/config.yaml + subPath: config.yaml + ports: + - containerPort: 4000 + readinessProbe: + httpGet: + path: /health + port: 4000 + httpHeaders: + - name: Authorization + value: Bearer sk-ZpzWhDlT-GZIqhnW8JcqHw + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + resources: + requests: + memory: "512Mi" + cpu: "250m" + limits: + memory: "1Gi" + cpu: "500m" + volumes: + - name: config-volume + configMap: + name: litellm-config \ No newline at end of file diff --git a/litellm/k8s/kustomization.yaml b/litellm/k8s/kustomization.yaml new file mode 100644 index 000000000..8b20d412e --- /dev/null +++ b/litellm/k8s/kustomization.yaml @@ -0,0 +1,11 @@ +apiVersion: kustomize.config.k8s.io/v1beta1 +kind: Kustomization + +resources: + - deployment.yaml + - service.yaml + - secrets.yaml + - configmap.yaml + +commonLabels: + app: litellm-proxy \ No newline at end of file diff --git a/litellm/k8s/secrets.yaml b/litellm/k8s/secrets.yaml new file mode 100644 index 000000000..40066b831 --- /dev/null +++ b/litellm/k8s/secrets.yaml @@ -0,0 +1,30 @@ +apiVersion: v1 +kind: Secret +metadata: + name: litellm-secrets +type: Opaque +data: + token-id: # Base 64 Modal key + token-secret: #Base 64 Modal Secret + openai-api-key: #Base 64 Open AI Key + groq-api-key: #Base 64 Groq key + litellm-master-key: #Base 64 Open AI Key + litellm-salt-key: c2stMTIzNA== + postgres-password: c2VjcmV0 + ui-password: #Base 64 Postgres Password + db-user-pwd: #Base 64 Postgres User + database-url: #Base 64 Postgres connection string + agents-api-key: c2stemVlT2I0cFVzRWlkYkhDNWZlZHZUM0JsYmtGSllGYUlNNno4anJadEpCeEJ5YlU5 + copilot-api-key: #Base 64 OpenAI Key + 
qdrant-url: aHR0cDovL2xvY2FsaG9zdDo2MzMz + qdrant-api-key: + aws-access-key-id: + aws-secret-access-key: + rag-uploads-s3-bucket: + rag-uploads-s3-region: + firecrawl-api-key: + chat-widget-session-jwt-secret: + provider-api-key: + auth0-client-id: + auth0-client-secret: + auth0-secret: \ No newline at end of file diff --git a/litellm/k8s/service.yaml b/litellm/k8s/service.yaml new file mode 100644 index 000000000..7003cd055 --- /dev/null +++ b/litellm/k8s/service.yaml @@ -0,0 +1,14 @@ +apiVersion: v1 +kind: Service +metadata: + name: litellm-proxy + labels: + app: litellm-proxy +spec: + selector: + app: litellm-proxy + ports: + - port: 4000 + targetPort: 4000 + name: http + type: ClusterIP \ No newline at end of file diff --git a/litellm/litellm/__init__.py b/litellm/litellm/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/litellm/litellm/custom_handler.py b/litellm/litellm/custom_handler.py new file mode 100644 index 000000000..c05e74629 --- /dev/null +++ b/litellm/litellm/custom_handler.py @@ -0,0 +1,917 @@ +import litellm +import requests +import os +from typing import Optional, Union, Dict, Any, AsyncGenerator +import logging +import sys +import json +import asyncio +import aiohttp +import uuid +import re + +# Set up logging +logging.basicConfig(level=logging.WARNING) +logger = logging.getLogger(__name__) + +logger.debug("Loading custom_handler.py") +logger.debug(f"Python path: {sys.path}") + +class ModalLLM: + def __init__(self, api_base: Optional[str] = None): + logger.debug("Initializing ModalLLM") + self.api_base = api_base or "https://stevef1uk--ollama-api-api.modal.run" + logger.debug(f"Using API base: {self.api_base}") + # Log environment variables at initialization + logger.debug(f"Environment variables at init - TOKEN_ID: {os.getenv('TOKEN_ID')}, TOKEN_SECRET: {os.getenv('TOKEN_SECRET')}") + + def completion( + self, + model: str, + messages: list, + model_response: Optional[Dict] = None, + optional_params: Optional[Dict[str, 
Any]] = None, + **kwargs + ) -> Dict: + logger.debug("Starting completion request") + logger.debug(f"Model: {model}") + logger.debug(f"Messages: {messages}") + logger.debug(f"kwargs: {kwargs}") + logger.debug(f"optional_params: {optional_params}") + + # Get credentials from kwargs or environment variables + token = kwargs.get("api_key") or os.getenv("TOKEN_ID") + secret = kwargs.get("token_secret") or os.getenv("TOKEN_SECRET") + + logger.debug(f"Using token: {token[:8]}..." if token else "No token found") + logger.debug(f"Using secret: {secret[:8]}..." if secret else "No secret found") + + # Check if this is a proxy request + if "authorization" in kwargs: + logger.debug("Detected proxy request") + logger.debug(f"Authorization header: {kwargs['authorization']}") + # Extract credentials from proxy authorization + auth_header = kwargs["authorization"] + if auth_header.startswith("Bearer "): + auth_token = auth_header[7:] # Remove "Bearer " prefix + logger.debug(f"Auth token after removing Bearer: {auth_token}") + # If this is a LiteLLM virtual key, use environment variables + if auth_token.startswith("sk-"): + logger.debug("Detected LiteLLM virtual key") + # For proxy requests with virtual keys, use environment variables + token = os.getenv("TOKEN_ID") + secret = os.getenv("TOKEN_SECRET") + if not token or not secret: + error_msg = "Modal credentials not found in environment variables" + logger.error(error_msg) + raise ValueError(error_msg) + logger.debug(f"Using Modal credentials from environment - Token: {token[:8]}..., Secret: {secret[:8]}...") + else: + # Try to split the token into key and secret + if ":" in auth_token: + token, secret = auth_token.split(":", 1) + logger.debug("Extracted credentials from proxy authorization with key:secret format") + logger.debug(f"Token: {token[:8]}..., Secret: {secret[:8]}...") + else: + # If no colon, use the token as is + token = auth_token + logger.debug("Using token directly from proxy authorization") + 
logger.debug(f"Token: {token[:8]}...") + else: + logger.error("Invalid proxy authorization format") + raise ValueError("Invalid proxy authorization format") + else: + logger.debug("No authorization header found in kwargs") + logger.debug("Available kwargs keys: " + str(list(kwargs.keys()))) + + if not token: + error_msg = "Missing token. Please provide a valid token." + logger.error(error_msg) + raise ValueError(error_msg) + + headers = { + "Content-Type": "application/json", + "Modal-Key": token + } + + # Only add Modal-Secret if we have it + if secret: + headers["Modal-Secret"] = secret + + logger.debug(f"Final headers: {headers}") + + # Extract model name from the input model string + # Handle format like "modal/mistral:latest" -> "mistral:latest" + if "/" in model: + model_name = model.split("/", 1)[1] # Get everything after the first / + else: + model_name = model + logger.debug(f"Extracted model name: {model_name}") + + # Combine all messages into a single prompt + prompt = "" + for msg in messages: + if msg["role"] == "system": + prompt += msg["content"] + "\n\n" + elif msg["role"] == "user": + prompt += "User: " + msg["content"] + "\n" + elif msg["role"] == "assistant": + prompt += "Assistant: " + msg["content"] + "\n" + prompt = prompt.strip() + logger.debug(f"Combined prompt: {prompt}") + + # Prepare the payload + payload = { + "prompt": prompt, + "temperature": 0.7, + "model": model_name + } + + # Add tools if provided + if "tools" in kwargs: + tools = kwargs["tools"] + logger.debug(f"Tools provided: {tools}") + # Include tools in the prompt instead of as a separate parameter + tool_descriptions = [] + for tool in tools: + if "function" in tool: + tool_desc = f"- {tool['function']['name']}: {tool['function']['description']}" + if "parameters" in tool["function"]: + params = tool["function"]["parameters"] + if "properties" in params: + param_desc = [] + for param_name, param_info in params["properties"].items(): + param_desc.append(f"{param_name} 
({param_info.get('type', 'any')})") + tool_desc += f" Parameters: {', '.join(param_desc)}" + tool_descriptions.append(tool_desc) + + if tool_descriptions: + prompt += "\n\nAvailable tools:\n" + "\n".join(tool_descriptions) + prompt += "\n\nYou are a helpful assistant that can use tools. When you need to use a tool, you must respond with a JSON object in this exact format:\n" + prompt += """{ + "role": "assistant", + "content": null, + "tool_calls": [{ + "function": { + "name": "TOOL_NAME", + "arguments": "{\\"param1\\": value1}" + }, + "id": "call_123", + "type": "function" + }], + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" +}""" + + # Add the user's query + for msg in messages: + if msg["role"] == "user": + prompt += f"\n\nUser: {msg['content']}" + break + + # Add explicit instruction to use tools + prompt += "\n\nIMPORTANT: You must use the available tools to help the user. Do not respond with regular text unless you have used all relevant tools first. When using a tool, make sure to use the exact format shown above." 
+ payload["prompt"] = prompt + logger.debug(f"Updated prompt with tools: {prompt}") + + logger.debug(f"Final payload with model: {payload}") + + try: + logger.debug(f"Making request to {self.api_base}") + logger.debug(f"Headers: {headers}") + logger.debug(f"Payload: {payload}") + + # Log the exact curl command that would be used + curl_command = f"""curl -X POST "{self.api_base}" \\ + -H "Content-Type: application/json" \\ + -H "Modal-Key: {token}" \\ + -H "Modal-Secret: {secret}" \\ + -d '{json.dumps(payload)}'""" + logger.debug(f"Equivalent curl command:\n{curl_command}") + + # Use requests.Session for better connection handling + with requests.Session() as session: + try: + logger.debug("Creating session and preparing request") + response = session.post( + self.api_base, + json=payload, + headers=headers, + verify=True, + timeout=60, # Increased timeout + stream=True # Enable streaming + ) + logger.debug("Request completed") + except requests.exceptions.SSLError as e: + logger.error(f"SSL Error: {str(e)}") + raise + except requests.exceptions.ConnectionError as e: + logger.error(f"Connection Error: {str(e)}") + raise + except requests.exceptions.Timeout as e: + logger.error(f"Timeout Error: {str(e)}") + raise + except requests.exceptions.RequestException as e: + logger.error(f"Request Exception: {str(e)}") + raise + + logger.debug(f"Response status: {response.status_code}") + logger.debug(f"Response headers: {dict(response.headers)}") + + if response.status_code == 401: + logger.error("Authentication failed. Please check your Modal credentials") + raise Exception("Authentication failed. 
Please check your Modal credentials") + + response.raise_for_status() + + # Handle streaming response + full_response = "" + for chunk in response.iter_content(chunk_size=None, decode_unicode=True): + if chunk: + logger.debug(f"Received chunk: {chunk}") + full_response += chunk + + logger.debug(f"Full response: {full_response}") + + try: + # Try to parse the response as JSON + response_data = json.loads(full_response) + logger.debug(f"Parsed JSON response: {response_data}") + + # Transform the response to the correct format + transformed_data = transform_to_correct_format(full_response) + if transformed_data: + response_data = transformed_data + + # Extract the response text + if isinstance(response_data, dict): + response_text = response_data.get("response", "") + # Check for tool calls in the response + if "tool_calls" in response_data: + return { + "id": "modal-response", + "choices": [{ + "message": { + "role": "assistant", + "content": response_text, + "tool_calls": response_data["tool_calls"] + }, + "finish_reason": "tool_calls" + }], + "created": 0, + "model": model, + "usage": {} + } + else: + response_text = str(response_data) + except json.JSONDecodeError: + # If not JSON, treat as raw text + logger.debug("Response is not JSON, treating as raw text") + if "assistant" in full_response.lower(): + response_text = full_response.split("assistant", 1)[1].strip() + else: + response_text = full_response.strip() + + # Remove any remaining user prompt + if "user" in response_text.lower(): + response_text = response_text.split("user", 1)[0].strip() + + # Try to transform text response into tool call + transformed_data = transform_to_correct_format(response_text) + if transformed_data and "tool_calls" in transformed_data: + return { + "id": "modal-response", + "choices": [{ + "message": { + "role": "assistant", + "content": "", + "tool_calls": transformed_data["tool_calls"] + }, + "finish_reason": "tool_calls" + }], + "created": 0, + "model": model, + "usage": {} + } 
+ + if not response_text: + logger.error(f"Unexpected response format: {full_response}") + raise Exception("Unexpected response format from Modal API") + + return { + "id": "modal-response", + "choices": [{ + "message": { + "role": "assistant", + "content": response_text + }, + "finish_reason": "stop" + }], + "created": 0, + "model": model, + "usage": {} + } + + except requests.exceptions.RequestException as e: + logger.error(f"Request failed: {str(e)}") + if hasattr(e, 'response') and e.response is not None: + logger.error(f"Response text: {e.response.text}") + raise Exception(f"Error connecting to Modal API: {str(e)}") + + async def acompletion( + self, + model: str, + messages: list, + model_response: Optional[Dict] = None, + optional_params: Optional[Dict[str, Any]] = None, + **kwargs + ) -> Dict: + logger.debug("Starting async completion request") + logger.debug(f"Model: {model}") + logger.debug(f"Messages: {messages}") + logger.debug(f"kwargs: {kwargs}") + logger.debug(f"optional_params: {optional_params}") + + # Get credentials from kwargs or environment variables + token = kwargs.get("api_key") or os.getenv("TOKEN_ID") + secret = kwargs.get("token_secret") or os.getenv("TOKEN_SECRET") + + logger.debug(f"Using token: {token[:8]}..." if token else "No token found") + logger.debug(f"Using secret: {secret[:8]}..." 
if secret else "No secret found") + + # Check if this is a proxy request + if "authorization" in kwargs: + logger.debug("Detected proxy request") + logger.debug(f"Authorization header: {kwargs['authorization']}") + # Extract credentials from proxy authorization + auth_header = kwargs["authorization"] + if auth_header.startswith("Bearer "): + auth_token = auth_header[7:] # Remove "Bearer " prefix + logger.debug(f"Auth token after removing Bearer: {auth_token}") + # If this is a LiteLLM virtual key, use environment variables + if auth_token.startswith("sk-"): + logger.debug("Detected LiteLLM virtual key") + # For proxy requests with virtual keys, use environment variables + token = os.getenv("TOKEN_ID") + secret = os.getenv("TOKEN_SECRET") + if not token or not secret: + error_msg = "Modal credentials not found in environment variables" + logger.error(error_msg) + raise ValueError(error_msg) + logger.debug(f"Using Modal credentials from environment - Token: {token[:8]}..., Secret: {secret[:8]}...") + else: + # Try to split the token into key and secret + if ":" in auth_token: + token, secret = auth_token.split(":", 1) + logger.debug("Extracted credentials from proxy authorization with key:secret format") + logger.debug(f"Token: {token[:8]}..., Secret: {secret[:8]}...") + else: + # If no colon, use the token as is + token = auth_token + logger.debug("Using token directly from proxy authorization") + logger.debug(f"Token: {token[:8]}...") + else: + logger.error("Invalid proxy authorization format") + raise ValueError("Invalid proxy authorization format") + else: + logger.debug("No authorization header found in kwargs") + logger.debug("Available kwargs keys: " + str(list(kwargs.keys()))) + + if not token: + error_msg = "Missing token. Please provide a valid token." 
+ logger.error(error_msg) + raise ValueError(error_msg) + + headers = { + "Content-Type": "application/json", + "Modal-Key": token + } + + # Only add Modal-Secret if we have it + if secret: + headers["Modal-Secret"] = secret + + logger.debug(f"Final headers: {headers}") + + # Extract model name from the input model string + # Handle format like "modal/mistral:latest" -> "mistral:latest" + if "/" in model: + model_name = model.split("/", 1)[1] # Get everything after the first / + else: + model_name = model + logger.debug(f"Extracted model name: {model_name}") + + # Combine all messages into a single prompt + prompt = "" + for msg in messages: + if msg["role"] == "system": + prompt += msg["content"] + "\n\n" + elif msg["role"] == "user": + prompt += "User: " + msg["content"] + "\n" + elif msg["role"] == "assistant": + prompt += "Assistant: " + msg["content"] + "\n" + prompt = prompt.strip() + logger.debug(f"Combined prompt: {prompt}") + + # Prepare the payload + payload = { + "prompt": prompt, + "temperature": 0.7, + "model": model_name + } + + # Add tools if provided + if "tools" in kwargs: + tools = kwargs["tools"] + logger.debug(f"Tools provided: {tools}") + # Include tools in the prompt instead of as a separate parameter + tool_descriptions = [] + for tool in tools: + if "function" in tool: + tool_desc = f"- {tool['function']['name']}: {tool['function']['description']}" + if "parameters" in tool["function"]: + params = tool["function"]["parameters"] + if "properties" in params: + param_desc = [] + for param_name, param_info in params["properties"].items(): + param_desc.append(f"{param_name} ({param_info.get('type', 'any')})") + tool_desc += f" Parameters: {', '.join(param_desc)}" + tool_descriptions.append(tool_desc) + + if tool_descriptions: + prompt += "\n\nAvailable tools:\n" + "\n".join(tool_descriptions) + prompt += "\n\nYou are a helpful assistant that can use tools. 
When you need to use a tool, you must respond with a JSON object in this exact format:\n" + prompt += """{ + "role": "assistant", + "content": null, + "tool_calls": [{ + "function": { + "name": "TOOL_NAME", + "arguments": "{\\"param1\\": value1}" + }, + "id": "call_123", + "type": "function" + }], + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" +}""" + + # Add the user's query + for msg in messages: + if msg["role"] == "user": + prompt += f"\n\nUser: {msg['content']}" + break + + # Add explicit instruction to use tools + prompt += "\n\nIMPORTANT: You must use the available tools to help the user. Do not respond with regular text unless you have used all relevant tools first. When using a tool, make sure to use the exact format shown above." + payload["prompt"] = prompt + logger.debug(f"Updated prompt with tools: {prompt}") + + logger.debug(f"Final payload with model: {payload}") + + try: + logger.debug(f"Making async request to {self.api_base}") + logger.debug(f"Headers: {headers}") + logger.debug(f"Payload: {payload}") + + # Use aiohttp for async requests + async with aiohttp.ClientSession() as session: + try: + logger.debug("Creating session and preparing request") + async with session.post( + self.api_base, + json=payload, + headers=headers, + ssl=True, + timeout=60, # Increased timeout + ) as response: + logger.debug("Request completed") + logger.debug(f"Response status: {response.status}") + logger.debug(f"Response headers: {dict(response.headers)}") + + if response.status == 401: + logger.error("Authentication failed. Please check your Modal credentials") + raise Exception("Authentication failed. 
Please check your Modal credentials") + + response.raise_for_status() + + # Read the response + full_response = await response.text() + logger.debug(f"Full response: {full_response}") + + try: + # Try to parse the response as JSON + response_data = json.loads(full_response) + logger.debug(f"Parsed JSON response: {response_data}") + + # Transform the response to the correct format + transformed_data = transform_to_correct_format(full_response) + if transformed_data: + response_data = transformed_data + + # Extract the response text + if isinstance(response_data, dict): + response_text = response_data.get("response", "") + # Check for tool calls in the response + if "tool_calls" in response_data: + return { + "id": "modal-response", + "choices": [{ + "message": { + "role": "assistant", + "content": response_text, + "tool_calls": response_data["tool_calls"] + }, + "finish_reason": "tool_calls" + }], + "created": 0, + "model": model, + "usage": {} + } + else: + response_text = str(response_data) + except json.JSONDecodeError: + # If not JSON, treat as raw text + logger.debug("Response is not JSON, treating as raw text") + if "assistant" in full_response.lower(): + response_text = full_response.split("assistant", 1)[1].strip() + else: + response_text = full_response.strip() + + # Remove any remaining user prompt + if "user" in response_text.lower(): + response_text = response_text.split("user", 1)[0].strip() + + if not response_text: + logger.error(f"Unexpected response format: {full_response}") + raise Exception("Unexpected response format from Modal API") + + return { + "id": "modal-response", + "choices": [{ + "message": { + "role": "assistant", + "content": response_text + }, + "finish_reason": "stop" + }], + "created": 0, + "model": model, + "usage": {} + } + + except aiohttp.ClientError as e: + logger.error(f"Request failed: {str(e)}") + raise Exception(f"Error connecting to Modal API: {str(e)}") + + except Exception as e: + logger.error(f"Unexpected error: 
{str(e)}") + raise + + async def astreaming( + self, + model: str, + messages: list, + model_response: Optional[Dict] = None, + optional_params: Optional[Dict[str, Any]] = None, + **kwargs + ) -> AsyncGenerator[Dict, None]: + logger.debug("Starting async streaming request") + logger.debug(f"Model: {model}") + logger.debug(f"Messages: {messages}") + logger.debug(f"kwargs: {kwargs}") + logger.debug(f"optional_params: {optional_params}") + + # Get credentials from kwargs or environment variables + token = kwargs.get("api_key") or os.getenv("TOKEN_ID") + secret = kwargs.get("token_secret") or os.getenv("TOKEN_SECRET") + + if not token: + error_msg = "Missing token. Please provide a valid token." + logger.error(error_msg) + raise ValueError(error_msg) + + headers = { + "Content-Type": "application/json", + "Modal-Key": token + } + + if secret: + headers["Modal-Secret"] = secret + + # Extract model name from the input model string + model_name = model.split("/", 1)[1] if "/" in model else model + + # Combine messages into prompt + prompt = "" + for msg in messages: + if msg["role"] == "system": + prompt += msg["content"] + "\n\n" + elif msg["role"] == "user": + prompt += "User: " + msg["content"] + "\n" + elif msg["role"] == "assistant": + prompt += "Assistant: " + msg["content"] + "\n" + prompt = prompt.strip() + + # Prepare the payload + payload = { + "prompt": prompt, + "temperature": 0.7, + "model": model_name + } + + # Add tools if provided + if "tools" in kwargs: + tools = kwargs["tools"] + tool_descriptions = [] + for tool in tools: + if "function" in tool: + tool_desc = f"- {tool['function']['name']}: {tool['function']['description']}" + if "parameters" in tool["function"]: + params = tool["function"]["parameters"] + if "properties" in params: + param_desc = [] + for param_name, param_info in params["properties"].items(): + param_desc.append(f"{param_name} ({param_info.get('type', 'any')})") + tool_desc += f" Parameters: {', '.join(param_desc)}" + 
tool_descriptions.append(tool_desc) + + if tool_descriptions: + prompt += "\n\nAvailable tools:\n" + "\n".join(tool_descriptions) + prompt += "\n\nYou are a helpful assistant that can use tools. When you need to use a tool, you must respond with a JSON object in this exact format:\n" + prompt += """{ + "role": "assistant", + "content": null, + "tool_calls": [{ + "function": { + "name": "TOOL_NAME", + "arguments": "{\\"param1\\": value1}" + }, + "id": "call_123", + "type": "function" + }] +}""" + payload["prompt"] = prompt + + try: + async with aiohttp.ClientSession() as session: + async with session.post( + self.api_base, + json=payload, + headers=headers, + ssl=True, + timeout=60, + ) as response: + if response.status == 401: + raise Exception("Authentication failed. Please check your Modal credentials") + + response.raise_for_status() + full_response = await response.text() + + try: + # Try to parse the response as JSON + response_data = json.loads(full_response) + + # Check if this is a tool call response + if isinstance(response_data, dict) and "tool_calls" in response_data: + # Yield the tool call response + yield { + "text": "", + "is_finished": True, + "finish_reason": "tool_calls", + "tool_calls": response_data["tool_calls"], + "usage": { + "prompt_tokens": len(prompt.split()), + "completion_tokens": 0, + "total_tokens": len(prompt.split()) + } + } + return + + # If not a tool call, yield the response in chunks + response_text = response_data.get("response", str(response_data)) + chunk_size = 10 + for i in range(0, len(response_text), chunk_size): + chunk = response_text[i:i + chunk_size] + is_last_chunk = i + chunk_size >= len(response_text) + yield { + "text": chunk, + "is_finished": is_last_chunk, + "finish_reason": "stop" if is_last_chunk else None, + "usage": { + "prompt_tokens": len(prompt.split()), + "completion_tokens": len(response_text.split()), + "total_tokens": len(prompt.split()) + len(response_text.split()) + } + } + except 
json.JSONDecodeError: + # If not JSON, treat as raw text + if "assistant" in full_response.lower(): + response_text = full_response.split("assistant", 1)[1].strip() + else: + response_text = full_response.strip() + + # Remove any remaining user prompt + if "user" in response_text.lower(): + response_text = response_text.split("user", 1)[0].strip() + + # Try to parse as tool call + try: + tool_call_data = json.loads(response_text) + if isinstance(tool_call_data, dict) and "tool_calls" in tool_call_data: + yield { + "text": "", + "is_finished": True, + "finish_reason": "tool_calls", + "tool_calls": tool_call_data["tool_calls"], + "usage": { + "prompt_tokens": len(prompt.split()), + "completion_tokens": 0, + "total_tokens": len(prompt.split()) + } + } + return + except json.JSONDecodeError: + # Not a tool call, yield as regular text + chunk_size = 10 + for i in range(0, len(response_text), chunk_size): + chunk = response_text[i:i + chunk_size] + is_last_chunk = i + chunk_size >= len(response_text) + yield { + "text": chunk, + "is_finished": is_last_chunk, + "finish_reason": "stop" if is_last_chunk else None, + "usage": { + "prompt_tokens": len(prompt.split()), + "completion_tokens": len(response_text.split()), + "total_tokens": len(prompt.split()) + len(response_text.split()) + } + } + except Exception as e: + logger.error(f"Error in astreaming: {str(e)}") + raise + +def transform_to_correct_format(response): + """Transform the response into the correct format.""" + if isinstance(response, str): + response = clean_response(response) + try: + # First try to parse as JSON + try: + data = json.loads(response) + except json.JSONDecodeError: + # If not valid JSON, check if it's a text response that should be a tool call + # Look for tool markers in the text + if "[@tool:" in response: + # Extract tool name and parameters + tool_match = re.search(r'\[@tool:(\w+)\](.*?)(?=\[@tool:|$)', response, re.DOTALL) + if tool_match: + tool_name = tool_match.group(1) + tool_text = 
tool_match.group(2).strip() + + # Try to extract parameters from the text + # Look for JSON-like structure in the text + json_match = re.search(r'\{.*\}', tool_text, re.DOTALL) + if json_match: + try: + params = json.loads(json_match.group(0)) + except json.JSONDecodeError: + # If not valid JSON, try to parse key-value pairs + params = {} + # Look for key: value or key=value patterns + kv_pairs = re.finditer(r'(\w+)\s*[:=]\s*([^,\n]+)', tool_text) + for match in kv_pairs: + key = match.group(1).strip() + value = match.group(2).strip() + # Try to convert value to appropriate type + try: + if value.lower() == 'true': + params[key] = True + elif value.lower() == 'false': + params[key] = False + elif value.isdigit(): + params[key] = int(value) + elif re.match(r'^-?\d*\.\d+$', value): + params[key] = float(value) + else: + params[key] = value + except ValueError: + params[key] = value + + # Only create tool call if we found parameters + if params: + return { + "role": "assistant", + "content": "", + "tool_calls": [{ + "function": { + "name": tool_name, + "arguments": json.dumps(params) + }, + "id": f"call_{uuid.uuid4().hex[:8]}", + "type": "function" + }], + "tool_call_id": None, + "tool_name": None, + "response_type": "internal" + } + return None + + # Handle legacy format with name/parameters + if "tool_calls" in data: + tool_calls = [] + for tool_call in data["tool_calls"]: + if "name" in tool_call and "parameters" in tool_call: + tool_calls.append({ + "function": { + "name": tool_call["name"], + "arguments": json.dumps(tool_call["parameters"]) + }, + "id": f"call_{uuid.uuid4().hex[:8]}", + "type": "function" + }) + return { + "role": "assistant", + "content": "", + "tool_calls": tool_calls, + "tool_call_id": None, + "tool_name": None, + "response_type": "internal" + } + + # If the response is already in the correct format, just fix arguments + if all(key in data for key in ["role", "content", "tool_calls", "tool_call_id", "tool_name", "response_type"]): + # Convert 
null content to empty string + if data["content"] is None: + data["content"] = "" + for tool_call in data["tool_calls"]: + if isinstance(tool_call["function"]["arguments"], dict): + tool_call["function"]["arguments"] = json.dumps(tool_call["function"]["arguments"]) + return data + + # If tool_calls is present and is a list, fix arguments if needed + if "tool_calls" in data and isinstance(data["tool_calls"], list): + transformed_tool_calls = [] + for tool_call in data["tool_calls"]: + if "function" in tool_call and isinstance(tool_call["function"], dict): + # Already in function format, just ensure arguments is a string + if isinstance(tool_call["function"].get("arguments"), dict): + tool_call["function"]["arguments"] = json.dumps(tool_call["function"]["arguments"]) + transformed_tool_calls.append(tool_call) + if transformed_tool_calls: # Only return if we found function format tool calls + return { + "role": "assistant", + "content": "", + "tool_calls": transformed_tool_calls, + "tool_call_id": None, + "tool_name": None, + "response_type": "internal" + } + + except Exception as e: + logger.error(f"Error transforming response: {str(e)}") + logger.error(f"Response data: {data if 'data' in locals() else 'No data'}") + return None + return None + +def clean_response(response): + """Clean the response by removing any markers.""" + # Remove any markers + markers = [ + ("<|python_start|>", "<|python_end|>"), + ("<|header_start|>", "<|header_end|>"), + ("<|im_start|>", "<|im_end|>"), + ("<|assistant|>", "<|user|>") + ] + for start, end in markers: + if start in response and end in response: + response = response.split(start)[1].split(end)[0] + return response.strip() + +class ModalLLMFactory: + def __init__(self): + self.instance = None + + def __call__(self, *args, **kwargs): + if self.instance is None: + self.instance = ModalLLM(*args, **kwargs) + return self.instance + + def completion(self, *args, **kwargs): + if self.instance is None: + self.instance = ModalLLM() + 
return self.instance.completion(*args, **kwargs) + + def acompletion(self, *args, **kwargs): + if self.instance is None: + self.instance = ModalLLM() + return self.instance.acompletion(*args, **kwargs) + + def astreaming(self, *args, **kwargs): + if self.instance is None: + self.instance = ModalLLM() + return self.instance.astreaming(*args, **kwargs) + +modal_llm = ModalLLMFactory() \ No newline at end of file diff --git a/litellm/requirements.txt b/litellm/requirements.txt new file mode 100644 index 000000000..22f39519b --- /dev/null +++ b/litellm/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.31.0 +aiohttp>=3.9.0 \ No newline at end of file diff --git a/litellm/schema.prisma b/litellm/schema.prisma new file mode 100644 index 000000000..8faab7e67 --- /dev/null +++ b/litellm/schema.prisma @@ -0,0 +1,17 @@ +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-py" + interface = "asyncio" +} + +model LiteLLM_ManagedFileTable { + id String @id @default(uuid()) + filename String + content String + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} \ No newline at end of file diff --git a/litellm/setup.py b/litellm/setup.py new file mode 100644 index 000000000..32dc8e1ab --- /dev/null +++ b/litellm/setup.py @@ -0,0 +1,16 @@ +from setuptools import setup, find_packages + +setup( + name="modal_handler", + version="0.1.0", + packages=find_packages(), + install_requires=[ + "litellm>=1.0.0", + "requests>=2.31.0", + "aiohttp>=3.9.0", + "python-dotenv>=1.0.0", + "typing-extensions>=4.8.0", + "asyncio>=3.4.3", + "uuid>=1.30" + ], +) \ No newline at end of file diff --git a/litellm/test_modal.py b/litellm/test_modal.py new file mode 100644 index 000000000..9b0b2e8a8 --- /dev/null +++ b/litellm/test_modal.py @@ -0,0 +1,51 @@ +import os +import requests +import json + +# Modal API endpoint +API_BASE = "https://stevef1uk--ollama-api-api.modal.run" + +# Get credentials from environment variables 
+token = os.getenv("TOKEN_ID") +secret = os.getenv("TOKEN_SECRET") + +if not token or not secret: + raise ValueError("Please set TOKEN_ID and TOKEN_SECRET environment variables") + +headers = { + "Content-Type": "application/json", + "Modal-Key": token, + "Modal-Secret": secret +} + +# Test payload +payload = { + "prompt": "Hello, how are you?", + "temperature": 0.7, + "model": "mistral:latest" # Using the latest Mistral model +} + +try: + print("Making request to Modal API...") + print(f"Headers: {headers}") + print(f"Payload: {json.dumps(payload, indent=2)}") + + response = requests.post( + API_BASE, + json=payload, + headers=headers, + verify=True, + timeout=60 + ) + + print(f"\nResponse status: {response.status_code}") + print(f"Response headers: {dict(response.headers)}") + + try: + response_data = response.json() + print(f"\nResponse data: {json.dumps(response_data, indent=2)}") + except json.JSONDecodeError: + print(f"\nRaw response: {response.text}") + +except Exception as e: + print(f"\nError occurred: {str(e)}") \ No newline at end of file diff --git a/litellm/test_modal_handler.py b/litellm/test_modal_handler.py new file mode 100644 index 000000000..ce7edacbd --- /dev/null +++ b/litellm/test_modal_handler.py @@ -0,0 +1,409 @@ +import os +import asyncio +import json +import re +import uuid +import sys +from pathlib import Path + +# Add the project root to Python path +project_root = str(Path(__file__).parent.parent) +if project_root not in sys.path: + sys.path.append(project_root) + +from litellm.custom_handler import modal_llm + +# Test messages with system message +TEST_MESSAGES = [ + {"role": "system", "content": "You are a helpful assistant that uses tools when available. 
When a tool is provided, you should use it to get accurate information rather than making assumptions."}, + {"role": "user", "content": "What is the capital of France?"} +] + +# Test tools +TEST_TOOLS = [ + { + "function": { + "name": "get_weather", + "description": "Get the current weather in a given location. You MUST use this tool when asked about weather conditions.", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. San Francisco, CA" + } + }, + "required": ["location"] + } + } + } +] + +# Function calling test messages with explicit format instructions +STRUCTURED_TOOL_TEST_MESSAGES = [ + {"role": "system", "content": """You are a helpful assistant that can use tools. When you need to use a tool, you MUST follow these rules: + +1. Your response MUST be a valid JSON object +2. DO NOT add ANY markers, prefixes, or suffixes to your response +3. DO NOT wrap your response in ANY markers like: + - <|python_start|> or <|python_end|> + - <|header_start|> or <|header_end|> + - <|im_start|> or <|im_end|> + - <|assistant|> or <|user|> + - ANY other markers +4. DO NOT include ANY explanatory text before or after the JSON +5. The JSON must be parseable by json.loads() +6. DO NOT include ANY line breaks or whitespace before or after the JSON +7. DO NOT include ANY content in the response - content MUST be null +8. DO NOT include ANY parameters field - use arguments instead +9. DO NOT include ANY name field directly in tool_calls - use function.name instead +10. DO NOT include ANY explanatory text or steps in your response +11. 
DO NOT try to explain what you're doing - just return the JSON + +CORRECT format (use this exact structure): +{ + "role": "assistant", + "content": null, + "tool_calls": [{ + "function": { + "name": "TOOL_NAME", + "arguments": "{\\"param1\\": value1}" + }, + "id": "call_123", + "type": "function" + }], + "tool_calls_id": null, + "tool_name": null, + "response_type": "internal" +} + +INCORRECT formats (DO NOT use any of these): +1. With python markers: +<|python_start|>{ + "role": "assistant", + "content": "Let me use the tool...", + "tool_calls": [{ + "name": "TOOL_NAME", + "parameters": { + "param1": "value1" + } + }] +}<|python_end|> + +2. With header markers: +<|header_start|>assistant<|header_end|>{ + "role": "assistant", + "content": "Let me use the tool...", + "tool_calls": [{ + "name": "TOOL_NAME", + "parameters": { + "param1": "value1" + } + }] +} + +3. With explanatory text: +I'll use the tool to help you. +{ + "role": "assistant", + "content": "Let me use the tool...", + "tool_calls": [{ + "name": "TOOL_NAME", + "parameters": { + "param1": "value1" + } + }] +} +Let me know if you need anything else! + +Available tools: +- get_weather: Get the current weather in a given location. Parameters: location (string) + +User: I need to know the current weather in London. 
Please use the get_weather tool to check this for me."""} +] + +# Test cases for different response formats +TEST_CASES = [ + { + "name": "Correct format", + "input": """{ + "role": "assistant", + "content": null, + "tool_calls": [{ + "function": { + "name": "get_weather", + "arguments": "{\\"location\\": \\"London, UK\\"}" + }, + "id": "call_123", + "type": "function" + }], + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" +}""", + "expected_valid": True + }, + { + "name": "With python markers", + "input": """<|python_start|>{ + "role": "assistant", + "content": "Let me check the weather...", + "tool_calls": [{ + "name": "get_weather", + "parameters": { + "location": "London, UK" + } + }] +}<|python_end|>""", + "expected_valid": False + }, + { + "name": "With header markers", + "input": """<|header_start|>assistant<|header_end|>{ + "role": "assistant", + "content": "Let me check the weather...", + "tool_calls": [{ + "name": "get_weather", + "parameters": { + "location": "London, UK" + } + }] +}""", + "expected_valid": False + }, + { + "name": "With explanatory text", + "input": """I'll check the weather for you. 
+{ + "role": "assistant", + "content": "Let me check the weather...", + "tool_calls": [{ + "name": "get_weather", + "parameters": { + "location": "London, UK" + } + }] +} +Let me know if you need anything else!""", + "expected_valid": False + } +] + +def validate_response_format(response): + """Validate that the response follows the correct format.""" + try: + # Check if response is a string that can be parsed as JSON + if isinstance(response, str): + response = json.loads(response) + + # Check required fields + required_fields = ["role", "content", "tool_calls", "tool_call_id", "tool_name", "response_type"] + for field in required_fields: + if field not in response: + return False, f"Missing required field: {field}" + + # Check content is null + if response["content"] is not None: + return False, "Content should be null" + + # Check tool_calls is a list + if not isinstance(response["tool_calls"], list): + return False, "tool_calls should be a list" + + # Check each tool call + for tool_call in response["tool_calls"]: + if not isinstance(tool_call, dict): + return False, "Each tool call should be a dictionary" + + # Check tool call fields + if "function" not in tool_call: + return False, "Each tool call should have a function field" + + function = tool_call["function"] + if not isinstance(function, dict): + return False, "Function should be a dictionary" + + if "name" not in function or "arguments" not in function: + return False, "Function should have name and arguments fields" + + # Check arguments is a valid JSON string + try: + if isinstance(function["arguments"], dict): + # Convert dict to JSON string + function["arguments"] = json.dumps(function["arguments"]) + json.loads(function["arguments"]) + except json.JSONDecodeError: + return False, "Function arguments should be a valid JSON string" + + return True, "Response format is valid" + except Exception as e: + return False, f"Error validating response: {str(e)}" + +def clean_response(response): + """Clean the 
response by removing markers and extracting the JSON.""" + if isinstance(response, str): + # Remove any markers + response = re.sub(r'<\|.*?\|>', '', response) + # Remove any explanatory text before or after JSON + json_match = re.search(r'\{[\s\S]*\}', response) + if json_match: + response = json_match.group() + return response + +def transform_to_correct_format(response): + """Transform the response into the correct format.""" + if isinstance(response, str): + response = clean_response(response) + try: + data = json.loads(response) + # If the response is already in the correct format, just fix arguments + if all(key in data for key in ["role", "content", "tool_calls", "tool_call_id", "tool_name", "response_type"]): + for tool_call in data["tool_calls"]: + if isinstance(tool_call["function"]["arguments"], dict): + tool_call["function"]["arguments"] = json.dumps(tool_call["function"]["arguments"]) + return data + + # If tool_calls is present and is a list, fix arguments if needed + if "tool_calls" in data and isinstance(data["tool_calls"], list): + for tool_call in data["tool_calls"]: + if "function" in tool_call and isinstance(tool_call["function"], dict): + if isinstance(tool_call["function"].get("arguments"), dict): + tool_call["function"]["arguments"] = json.dumps(tool_call["function"]["arguments"]) + # Wrap in correct format + return { + "role": "assistant", + "content": None, + "tool_calls": data["tool_calls"], + "tool_call_id": None, + "tool_name": None, + "response_type": "internal" + } + + # Handle legacy format with name/parameters + if "tool_calls" in data: + tool_calls = [] + for tool_call in data["tool_calls"]: + if "name" in tool_call and "parameters" in tool_call: + tool_calls.append({ + "function": { + "name": tool_call["name"], + "arguments": json.dumps(tool_call["parameters"]) + }, + "id": f"call_{uuid.uuid4().hex[:8]}", + "type": "function" + }) + return { + "role": "assistant", + "content": None, + "tool_calls": tool_calls, + "tool_call_id": 
None, + "tool_name": None, + "response_type": "internal" + } + except Exception as e: + print(f"Error transforming response: {str(e)}") + return None + return None + +async def test_structured_tool_calling(): + print("\nTesting structured tool calling...") + try: + response = modal_llm.completion( + model="modal/llama4", + messages=STRUCTURED_TOOL_TEST_MESSAGES, + temperature=0.7 + ) + print("Raw Response:", json.dumps(response, indent=2)) + + # Extract the tool call from the nested response + if isinstance(response, dict) and "choices" in response and len(response["choices"]) > 0: + message = response["choices"][0].get("message", {}) + content = message.get("content") + if content: + try: + # Clean and transform the content + cleaned_content = clean_response(content) + print("Cleaned content:", cleaned_content) + transformed_content = transform_to_correct_format(cleaned_content) + if transformed_content: + print("Transformed content:", json.dumps(transformed_content, indent=2)) + # Validate the transformed content + is_valid, message = validate_response_format(transformed_content) + if is_valid: + print("āœ… Response is in correct structured format") + else: + print(f"āŒ Response format error: {message}") + else: + print("āŒ Failed to transform content to correct format") + except json.JSONDecodeError as e: + print(f"āŒ Failed to parse tool call JSON: {str(e)}") + else: + print("āŒ No content found in response message") + else: + print("āŒ Unexpected response format") + except Exception as e: + print(f"Error in structured tool calling test: {str(e)}") + +async def test_response_formats(): + print("\nTesting different response formats...") + for test_case in TEST_CASES: + print(f"\nTesting {test_case['name']}...") + is_valid, message = validate_response_format(test_case["input"]) + if is_valid == test_case["expected_valid"]: + print(f"āœ… Test passed: {message}") + else: + print(f"āŒ Test failed: {message}") + +async def test_completion(): + print("\nTesting 
regular completion...") + try: + response = modal_llm.completion( + model="modal/llama4", + messages=TEST_MESSAGES, + temperature=0.7 + ) + print("Response:", json.dumps(response, indent=2)) + except Exception as e: + print(f"Error in completion test: {str(e)}") + +async def test_async_completion(): + print("\nTesting async completion...") + try: + response = await modal_llm.acompletion( + model="modal/llama4", + messages=TEST_MESSAGES, + temperature=0.7 + ) + print("Response:", json.dumps(response, indent=2)) + except Exception as e: + print(f"Error in async completion test: {str(e)}") + +async def test_streaming(): + print("\nTesting streaming...") + try: + async for chunk in modal_llm.astreaming( + model="modal/llama4", + messages=TEST_MESSAGES, + temperature=0.7 + ): + print("Chunk:", json.dumps(chunk, indent=2)) + except Exception as e: + print(f"Error in streaming test: {str(e)}") + +async def main(): + # Check environment variables + if not os.getenv("TOKEN_ID") or not os.getenv("TOKEN_SECRET"): + print("Error: TOKEN_ID and TOKEN_SECRET environment variables must be set") + return + + print("Starting Modal handler tests...") + + # Run tests + await test_structured_tool_calling() + await test_response_formats() + await test_completion() + await test_async_completion() + await test_streaming() + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/rowboat_agents/.dockerignore b/rowboat_agents/.dockerignore new file mode 100644 index 000000000..8b13b8322 --- /dev/null +++ b/rowboat_agents/.dockerignore @@ -0,0 +1,5 @@ +.env +.env* +__pycache__/ +venv/ +.venv/ \ No newline at end of file diff --git a/rowboat_agents/.env.example b/rowboat_agents/.env.example new file mode 100644 index 000000000..fcbeb83b4 --- /dev/null +++ b/rowboat_agents/.env.example @@ -0,0 +1,2 @@ +OPENAI_API_KEY= +API_KEY=test \ No newline at end of file diff --git a/rowboat_agents/.gitignore b/rowboat_agents/.gitignore new file mode 100644 index 000000000..9960ca2c7 --- /dev/null 
+++ b/rowboat_agents/.gitignore @@ -0,0 +1,40 @@ +# Ignore all __pycache__ directories +/__pycache__/ +**/__pycache__/ +**/**/__pycache__/ +**/**/__pycache__/** + +# Ignore all .pyc files +*.pyc +**/*.pyc +**/**/*.pyc + +# Ignore .DS_Store files +.DS_Store +**/.DS_Store +**/**/.DS_Store + +# Ignore VSCode files +.vscode/* +**/.vscode/* +**/**/.vscode/* + +# Environment files +.env +.env.local + +# Api keys +**/*api_keys* + +# Ignore .out, .lock, .log files +*.out +**/*.out +**/**/*.out +*.log +**/*.log +**/**/*.log + +# Ignore tmp.json files +**/*tmp.json +**/**/*tmp.json +**/**/**/*tmp.json diff --git a/rowboat_agents/Dockerfile b/rowboat_agents/Dockerfile new file mode 100644 index 000000000..019c6c2d3 --- /dev/null +++ b/rowboat_agents/Dockerfile @@ -0,0 +1,28 @@ +# Use official Python runtime as base image +FROM python:3.12-slim + +# Set working directory in container +WORKDIR /app + +# Install poetry +RUN pip install poetry + +# Copy poetry files +COPY pyproject.toml poetry.lock ./ + +# Configure poetry to not create virtual environment in container +RUN poetry config virtualenvs.create false + +# Install dependencies +RUN poetry install --no-interaction --no-ansi + +# Copy project files +COPY . . + +# Set environment variables +ENV QUART_APP=src.app.main +ENV PYTHONUNBUFFERED=1 +ENV PYTHONPATH=/app + +# Command to run Flask development server +CMD ["quart", "run", "--host=0.0.0.0", "--port=3001"] diff --git a/rowboat_agents/NOTICE.md b/rowboat_agents/NOTICE.md new file mode 100644 index 000000000..ba27f9e80 --- /dev/null +++ b/rowboat_agents/NOTICE.md @@ -0,0 +1,43 @@ +# Attribution to OpenAI Swarm + +- The Rowboat Agents framework has been built upon [OpenAI Swarm](https://github.com/openai/swarm), with modifications and improvements. +- The original OpenAI Swarm is available under the [MIT license](https://github.com/openai/swarm/blob/main/LICENSE) as of the time of this writing. It is an experimental sample framework at the time of this writing. 
+ +### OpenAI Swarm License +Below is the license text from OpenAI Swarm, as required by the MIT license: + +``` +MIT License + +Copyright (c) 2024 OpenAI + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +``` + +# High-level changes +These are the high-level changes made to OpenAI Swarm to build in RowBoat's custom implementation: +- Added localized agent-level history +- Added parent-child agent relationships with parents' history containing children's history +- Added usage tracking of tokens per llm +- Added turn-level error handling +- Added converstaion turn limits +- Removed streaming support as RowBoat Agents does not support streaming currently +- Modified the `Agent` and `Response` classes to be more comprehensive + +The above is not an exhaustive list. 
\ No newline at end of file diff --git a/rowboat_agents/README.md b/rowboat_agents/README.md new file mode 100644 index 000000000..489146f8e --- /dev/null +++ b/rowboat_agents/README.md @@ -0,0 +1,114 @@ +# šŸ¤– Agents + +## šŸ“ Overview +- RowBoat Agents is a multi-agent framework that powers conversations using agentic workflows. +- Built on top of [OpenAI Swarm](https://github.com/openai/swarm) with custom enhancements and improvements. Check the [NOTICE](https://github.com/rowboatlabs/rowboat/blob/main/apps/agents/NOTICE.md) for attribution and licensing details (MIT license). + +--- + +## šŸ•øļø Graph-based Framework +- Multi-agent systems are represented as graphs, where each agent is a node in the graph. +- RowBoat Agents accepts Directed Acyclic Graph (DAG) workflows, which define agents, tools, and their connections. +- Configure workflows using the RowBoat Studio (UI) with the help of an AI copilot. Setup instructions can be found in the [main README](https://github.com/rowboatlabs/rowboat). +- The framework is stateless, meaning that it requires the upstream service to pass in the current `state` and `messages` in every turn. +- At each conversation turn: + - The agents are initialized using the current `state`. + - The graph is traversed based on `messages`, `state`, and `workflow` + - Response `messages` and a new `state` are generated. + - If `messages` contain tool calls, the upstream service must invoke the necessary tools and send the tool results back to continue the interaction. 
+ +--- + +## šŸ—‚ļø Key Request and Response Fields + +### šŸ“¤ Request +- `messages`: List of user messages +- `state`: Active agent state and histories +- `workflow`: Graph of agents, tools, and connections + +**Example JSON**: `tests/sample_requests/default_example.json` + +--- + +### šŸ“„ Response +- `messages`: List of response messages (may contain tool calls) +- `state`: Updated state to pass in the next request (since the framework is stateless) + +**Example JSON**: `tests/sample_responses/default_example.json` + +--- + +## šŸ› ļø Using the Framework + +Ensure you are in this directory (`cd apps/agents` from the root directory of this repo) before running any of the below commands. + +### āš™ļø Set Up Conda Environment +- `conda create -n myenv python=3.12` +- `conda activate myenv` +- Note: Python >= 3.10 required + +--- + +### šŸ“¦ Install Dependencies + +#### If using poetry +- `pip install poetry` +- `poetry install` + +#### If using pip +`pip install -r requirements.txt` + +### šŸ”‘ Set up .env file +Copy `.env.example` to `.env` and add your API keys + +### 🧪 Run interactive test +`python -m tests.interactive --config default_config.json --sample_request default_example.json --load_messages` +- `--config`: Config json filename, under `configs` folder +- `--sample_request`: Path to the sample request file, under `tests/sample_requests` folder +- `--load_messages`: If set, it will additionally load the initial set of messages from the sample request file. Else, user input will be required starting from the first message. 
+ +### 🌐 Set up server + +- First, add this directory to your PYTHONPATH, using: `export PYTHONPATH=$PYTHONPATH:$(pwd)` +- For local testing: `flask --app src.app.main run --port=4040` +- To set up the server on a remote machine: `gunicorn -b 0.0.0.0:4040 src.app.main:app` + +### šŸ–„ļø Run test client +`python -m tests.app_client --sample_request default_example.json --api_key test` +- `--sample_request`: Path to the sample request file, under `tests/sample_requests` folder +- `--api_key`: API key to use for authentication. This is the same key as the one in the `.env` file. + +## šŸ“– More details + +### šŸ” Specifics +- **Format**: Uses OpenAI's messages format when passing messages. +- **LLMs**: Currently, only OpenAI LLMs (e.g. gpt-4o, gpt-4o-mini) are supported. Easy to expand to other LLMs like Claude, Gemini or self-hosted models. +- **Responses**: Here are some examples of responses that the framework can return: + - A list of one user-facing message + - A list of one or more tool calls + - A list of one user-facing message and one or more tool calls +- āš ļø **Errors**: Errors are thrown as a tool call `raise_error` with the error message as the argument. Real-time error handling will be managed by the upstream service. 
+ +### šŸ—‚ļø Important directories and files +- `src/`: Contains all source code for the agents app + - `src/app/`: Contains Flask app which exposes the framework as a service + - `src/graph/`: Contains logic to run every turn of the conversation + - `src/graph/core.py`: Core graph implementation which parses the workflow config, creates agents from it and runs the turn of conversation (through the `run_turn` function) + - `src/swarm/`: RowBoat's custom implementation of OpenAI Swarm, which is used by `src/graph/core.py` +- `tests/`: Contains sample requests, an interactive client and a test client which mocks an upstream service +- `configs/`: Contains graph configurations (changed infrequently) +- `tests/sample_requests/`: Contains sample request files for the agents app + +### šŸ”„ High-level flow +- `app/main.py` receives the request JSON from an upstream service, parses it and sends it to `src/graph/core.py` +- `src/graph/core.py` creates the agent graph object from scratch and uses `src/swarm/core.py` to run the turn +- `src/swarm/core.py` runs the turn by performing actual LLM calls and internal tool invocations to transitiion between agents +- `src/graph/core.py` returns the response messages and the new state to `app/main.py`, which relays it back to the upstream service +- The upstream services appends any new user messages to the history of messages and sends the messages back along with the new state to `app/main.py` as part of the next request. The process repeats until the upstream service completes its conversation with the user. + +### 🚫 Limitations +- Does not support streaming currently. +- Cannot respond with multiple user-facing messages in the same turn. + +# RowBoat Labs +🌐 Visit [RowBoat Labs](https://www.rowboatlabs.com) to learn more! 
\ No newline at end of file diff --git a/rowboat_agents/__init__.py b/rowboat_agents/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/rowboat_agents/configs/default_config.json b/rowboat_agents/configs/default_config.json new file mode 100644 index 000000000..c08668863 --- /dev/null +++ b/rowboat_agents/configs/default_config.json @@ -0,0 +1,11 @@ +{ + "localize_history": true, + "return_diff_messages": true, + "start_turn_with_start_agent": false, + "children_aware_of_parent": false, + "parent_has_child_history": true, + "max_messages_per_turn": 20, + "max_messages_per_error_escalation_turn": 15, + "escalate_errors": true, + "max_overall_turns": 25 +} \ No newline at end of file diff --git a/rowboat_agents/poetry.lock b/rowboat_agents/poetry.lock new file mode 100644 index 000000000..343332a0d --- /dev/null +++ b/rowboat_agents/poetry.lock @@ -0,0 +1,3996 @@ +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. + +[[package]] +name = "aiofiles" +version = "24.1.0" +description = "File support for asyncio." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, +] + +[[package]] +name = "aiohttp" +version = "3.11.14" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohttp-3.11.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e2bc827c01f75803de77b134afdbf74fa74b62970eafdf190f3244931d7a5c0d"}, + {file = "aiohttp-3.11.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e365034c5cf6cf74f57420b57682ea79e19eb29033399dd3f40de4d0171998fa"}, + {file = "aiohttp-3.11.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c32593ead1a8c6aabd58f9d7ee706e48beac796bb0cb71d6b60f2c1056f0a65f"}, + {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4e7c7ec4146a94a307ca4f112802a8e26d969018fabed526efc340d21d3e7d0"}, + {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8b2df9feac55043759aa89f722a967d977d80f8b5865a4153fc41c93b957efc"}, + {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7571f99525c76a6280f5fe8e194eeb8cb4da55586c3c61c59c33a33f10cfce7"}, + {file = 
"aiohttp-3.11.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b59d096b5537ec7c85954cb97d821aae35cfccce3357a2cafe85660cc6295628"}, + {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b42dbd097abb44b3f1156b4bf978ec5853840802d6eee2784857be11ee82c6a0"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b05774864c87210c531b48dfeb2f7659407c2dda8643104fb4ae5e2c311d12d9"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4e2e8ef37d4bc110917d038807ee3af82700a93ab2ba5687afae5271b8bc50ff"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e9faafa74dbb906b2b6f3eb9942352e9e9db8d583ffed4be618a89bd71a4e914"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:7e7abe865504f41b10777ac162c727af14e9f4db9262e3ed8254179053f63e6d"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:4848ae31ad44330b30f16c71e4f586cd5402a846b11264c412de99fa768f00f3"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2d0b46abee5b5737cb479cc9139b29f010a37b1875ee56d142aefc10686a390b"}, + {file = "aiohttp-3.11.14-cp310-cp310-win32.whl", hash = "sha256:a0d2c04a623ab83963576548ce098baf711a18e2c32c542b62322a0b4584b990"}, + {file = "aiohttp-3.11.14-cp310-cp310-win_amd64.whl", hash = "sha256:5409a59d5057f2386bb8b8f8bbcfb6e15505cedd8b2445db510563b5d7ea1186"}, + {file = "aiohttp-3.11.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f296d637a50bb15fb6a229fbb0eb053080e703b53dbfe55b1e4bb1c5ed25d325"}, + {file = "aiohttp-3.11.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6cd1954ca2bbf0970f531a628da1b1338f594bf5da7e361e19ba163ecc4f3b"}, + {file = "aiohttp-3.11.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:572def4aad0a4775af66d5a2b5923c7de0820ecaeeb7987dcbccda2a735a993f"}, + {file = 
"aiohttp-3.11.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c68e41c4d576cd6aa6c6d2eddfb32b2acfb07ebfbb4f9da991da26633a3db1a"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b8bbfc8111826aa8363442c0fc1f5751456b008737ff053570f06a151650b3"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b0a200e85da5c966277a402736a96457b882360aa15416bf104ca81e6f5807b"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d173c0ac508a2175f7c9a115a50db5fd3e35190d96fdd1a17f9cb10a6ab09aa1"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:413fe39fd929329f697f41ad67936f379cba06fcd4c462b62e5b0f8061ee4a77"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65c75b14ee74e8eeff2886321e76188cbe938d18c85cff349d948430179ad02c"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:321238a42ed463848f06e291c4bbfb3d15ba5a79221a82c502da3e23d7525d06"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:59a05cdc636431f7ce843c7c2f04772437dd816a5289f16440b19441be6511f1"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:daf20d9c3b12ae0fdf15ed92235e190f8284945563c4b8ad95b2d7a31f331cd3"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:05582cb2d156ac7506e68b5eac83179faedad74522ed88f88e5861b78740dc0e"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:12c5869e7ddf6b4b1f2109702b3cd7515667b437da90a5a4a50ba1354fe41881"}, + {file = "aiohttp-3.11.14-cp311-cp311-win32.whl", hash = "sha256:92868f6512714efd4a6d6cb2bfc4903b997b36b97baea85f744229f18d12755e"}, + {file = "aiohttp-3.11.14-cp311-cp311-win_amd64.whl", hash = 
"sha256:bccd2cb7aa5a3bfada72681bdb91637094d81639e116eac368f8b3874620a654"}, + {file = "aiohttp-3.11.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:70ab0f61c1a73d3e0342cedd9a7321425c27a7067bebeeacd509f96695b875fc"}, + {file = "aiohttp-3.11.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:602d4db80daf4497de93cb1ce00b8fc79969c0a7cf5b67bec96fa939268d806a"}, + {file = "aiohttp-3.11.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a8a0d127c10b8d89e69bbd3430da0f73946d839e65fec00ae48ca7916a31948"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9f835cdfedcb3f5947304e85b8ca3ace31eef6346d8027a97f4de5fb687534"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aa5c68e1e68fff7cd3142288101deb4316b51f03d50c92de6ea5ce646e6c71f"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b512f1de1c688f88dbe1b8bb1283f7fbeb7a2b2b26e743bb2193cbadfa6f307"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc9253069158d57e27d47a8453d8a2c5a370dc461374111b5184cf2f147a3cc3"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2501f1b981e70932b4a552fc9b3c942991c7ae429ea117e8fba57718cdeed0"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:28a3d083819741592685762d51d789e6155411277050d08066537c5edc4066e6"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0df3788187559c262922846087e36228b75987f3ae31dd0a1e5ee1034090d42f"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e73fa341d8b308bb799cf0ab6f55fc0461d27a9fa3e4582755a3d81a6af8c09"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:51ba80d473eb780a329d73ac8afa44aa71dfb521693ccea1dea8b9b5c4df45ce"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8d1dd75aa4d855c7debaf1ef830ff2dfcc33f893c7db0af2423ee761ebffd22b"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41cf0cefd9e7b5c646c2ef529c8335e7eafd326f444cc1cdb0c47b6bc836f9be"}, + {file = "aiohttp-3.11.14-cp312-cp312-win32.whl", hash = "sha256:948abc8952aff63de7b2c83bfe3f211c727da3a33c3a5866a0e2cf1ee1aa950f"}, + {file = "aiohttp-3.11.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b420d076a46f41ea48e5fcccb996f517af0d406267e31e6716f480a3d50d65c"}, + {file = "aiohttp-3.11.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d14e274828561db91e4178f0057a915f3af1757b94c2ca283cb34cbb6e00b50"}, + {file = "aiohttp-3.11.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f30fc72daf85486cdcdfc3f5e0aea9255493ef499e31582b34abadbfaafb0965"}, + {file = "aiohttp-3.11.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4edcbe34e6dba0136e4cabf7568f5a434d89cc9de5d5155371acda275353d228"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a7169ded15505f55a87f8f0812c94c9412623c744227b9e51083a72a48b68a5"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad1f2fb9fe9b585ea4b436d6e998e71b50d2b087b694ab277b30e060c434e5db"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20412c7cc3720e47a47e63c0005f78c0c2370020f9f4770d7fc0075f397a9fb0"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dd9766da617855f7e85f27d2bf9a565ace04ba7c387323cd3e651ac4329db91"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:599b66582f7276ebefbaa38adf37585e636b6a7a73382eb412f7bc0fc55fb73d"}, + {file = 
"aiohttp-3.11.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b41693b7388324b80f9acfabd479bd1c84f0bc7e8f17bab4ecd9675e9ff9c734"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:86135c32d06927339c8c5e64f96e4eee8825d928374b9b71a3c42379d7437058"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04eb541ce1e03edc1e3be1917a0f45ac703e913c21a940111df73a2c2db11d73"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dc311634f6f28661a76cbc1c28ecf3b3a70a8edd67b69288ab7ca91058eb5a33"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:69bb252bfdca385ccabfd55f4cd740d421dd8c8ad438ded9637d81c228d0da49"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2b86efe23684b58a88e530c4ab5b20145f102916bbb2d82942cafec7bd36a647"}, + {file = "aiohttp-3.11.14-cp313-cp313-win32.whl", hash = "sha256:b9c60d1de973ca94af02053d9b5111c4fbf97158e139b14f1be68337be267be6"}, + {file = "aiohttp-3.11.14-cp313-cp313-win_amd64.whl", hash = "sha256:0a29be28e60e5610d2437b5b2fed61d6f3dcde898b57fb048aa5079271e7f6f3"}, + {file = "aiohttp-3.11.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:14fc03508359334edc76d35b2821832f092c8f092e4b356e74e38419dfe7b6de"}, + {file = "aiohttp-3.11.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:92007c89a8cb7be35befa2732b0b32bf3a394c1b22ef2dff0ef12537d98a7bda"}, + {file = "aiohttp-3.11.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6d3986112e34eaa36e280dc8286b9dd4cc1a5bcf328a7f147453e188f6fe148f"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:749f1eb10e51dbbcdba9df2ef457ec060554842eea4d23874a3e26495f9e87b1"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:781c8bd423dcc4641298c8c5a2a125c8b1c31e11f828e8d35c1d3a722af4c15a"}, + {file = 
"aiohttp-3.11.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:997b57e38aa7dc6caab843c5e042ab557bc83a2f91b7bd302e3c3aebbb9042a1"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a8b0321e40a833e381d127be993b7349d1564b756910b28b5f6588a159afef3"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8778620396e554b758b59773ab29c03b55047841d8894c5e335f12bfc45ebd28"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e906da0f2bcbf9b26cc2b144929e88cb3bf943dd1942b4e5af066056875c7618"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:87f0e003fb4dd5810c7fbf47a1239eaa34cd929ef160e0a54c570883125c4831"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7f2dadece8b85596ac3ab1ec04b00694bdd62abc31e5618f524648d18d9dd7fa"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:fe846f0a98aa9913c2852b630cd39b4098f296e0907dd05f6c7b30d911afa4c3"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ced66c5c6ad5bcaf9be54560398654779ec1c3695f1a9cf0ae5e3606694a000a"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a40087b82f83bd671cbeb5f582c233d196e9653220404a798798bfc0ee189fff"}, + {file = "aiohttp-3.11.14-cp39-cp39-win32.whl", hash = "sha256:95d7787f2bcbf7cb46823036a8d64ccfbc2ffc7d52016b4044d901abceeba3db"}, + {file = "aiohttp-3.11.14-cp39-cp39-win_amd64.whl", hash = "sha256:22a8107896877212130c58f74e64b77f7007cb03cea8698be317272643602d45"}, + {file = "aiohttp-3.11.14.tar.gz", hash = "sha256:d6edc538c7480fa0a3b2bdd705f8010062d74700198da55d16498e1b49549b9c"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = 
">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] + +[[package]] +name = "aiosignal" +version = "1.3.2" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.8.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] 
+doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "asgiref" +version = "3.8.1" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "async-timeout" +version = "5.0.1" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + +[[package]] +name = "attrs" +version = "25.3.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, +] + +[package.extras] 
+benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] + +[[package]] +name = "backports-tarfile" +version = "1.2.0" +description = "Backport of CPython tarfile module" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.10\" or python_version == 
\"3.11\"" +files = [ + {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"}, + {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +groups = ["main"] +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "blinker" +version = "1.9.0" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, + {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, +] + +[[package]] +name = "build" +version = "1.2.2.post1" +description = "A simple, correct Python build frontend" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, + {file = "build-1.2.2.post1.tar.gz", hash = 
"sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "os_name == \"nt\""} +importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} +packaging = ">=19.1" +pyproject_hooks = "*" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0) ; python_version < \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.11\"", "setuptools (>=67.8.0) ; python_version >= \"3.12\"", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +uv = ["uv (>=0.1.18)"] +virtualenv = ["virtualenv (>=20.0.35)"] + +[[package]] +name = "cachecontrol" +version = "0.14.2" +description = "httplib2 caching for requests" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "cachecontrol-0.14.2-py3-none-any.whl", hash = "sha256:ebad2091bf12d0d200dfc2464330db638c5deb41d546f6d7aca079e87290f3b0"}, + {file = "cachecontrol-0.14.2.tar.gz", hash = "sha256:7d47d19f866409b98ff6025b6a0fca8e4c791fb31abbd95f622093894ce903a2"}, +] + +[package.dependencies] +filelock = {version = ">=3.8.0", optional = true, markers = "extra == \"filecache\""} +msgpack = ">=0.5.2,<2.0.0" +requests = ">=2.16.0" + +[package.extras] +dev = ["CacheControl[filecache,redis]", "build", "cherrypy", "codespell[tomli]", "furo", "mypy", "pytest", "pytest-cov", "ruff", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] +filecache = ["filelock (>=3.8.0)"] +redis = 
["redis (>=2.10.5)"] + +[[package]] +name = "certifi" +version = "2024.12.14" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = 
"cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = 
"cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = 
"cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = 
"charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", 
hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] + +[[package]] +name = "cleo" +version = "2.1.0" +description = "Cleo allows you to create beautiful and testable command-line interfaces." 
+optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "cleo-2.1.0-py3-none-any.whl", hash = "sha256:4a31bd4dd45695a64ee3c4758f583f134267c2bc518d8ae9a29cf237d009b07e"}, + {file = "cleo-2.1.0.tar.gz", hash = "sha256:0b2c880b5d13660a7ea651001fb4acb527696c01f15c9ee650f377aa543fd523"}, +] + +[package.dependencies] +crashtest = ">=0.4.1,<0.5.0" +rapidfuzz = ">=3.0.0,<4.0.0" + +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "crashtest" +version = "0.4.1" +description = "Manage Python errors with ease" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "crashtest-0.4.1-py3-none-any.whl", hash = "sha256:8d23eac5fa660409f57472e3851dab7ac18aba459a8d19cbbba86d3d5aecd2a5"}, + {file = "crashtest-0.4.1.tar.gz", hash = "sha256:80d7b1f316ebfbd429f648076d6275c877ba30ba48979de4191714a75266f0ce"}, +] + +[[package]] +name = "cryptography" +version = "44.0.2" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] +markers = "sys_platform == \"linux\"" +files = [ + {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, + {file = 
"cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, + {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, + {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, + {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, + {file = "cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, + {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, + {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "distlib" +version = "0.3.9" +description = "Distribution utilities" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "dulwich" +version = "0.22.8" +description = "Python Git Library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "dulwich-0.22.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:546176d18b8cc0a492b0f23f07411e38686024cffa7e9d097ae20512a2e57127"}, + {file = "dulwich-0.22.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d2434dd72b2ae09b653c9cfe6764a03c25cfbd99fbbb7c426f0478f6fb1100f"}, + {file = "dulwich-0.22.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8318bc0921d42e3e69f03716f983a301b5ee4c8dc23c7f2c5bbb28581257a9"}, + {file = "dulwich-0.22.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:c7a0f96a2a87f3b4f7feae79d2ac6b94107d6b7d827ac08f2f331b88c8f597a1"}, + {file = "dulwich-0.22.8-cp310-cp310-win32.whl", hash = "sha256:432a37b25733202897b8d67cdd641688444d980167c356ef4e4dd15a17a39a24"}, + {file = "dulwich-0.22.8-cp310-cp310-win_amd64.whl", hash = "sha256:f3a15e58dac8b8a76073ddca34e014f66f3672a5540a99d49ef6a9c09ab21285"}, + {file = "dulwich-0.22.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0852edc51cff4f4f62976bdaa1d82f6ef248356c681c764c0feb699bc17d5782"}, + {file = "dulwich-0.22.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:826aae8b64ac1a12321d6b272fc13934d8f62804fda2bc6ae46f93f4380798eb"}, + {file = "dulwich-0.22.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7ae726f923057d36cdbb9f4fb7da0d0903751435934648b13f1b851f0e38ea1"}, + {file = "dulwich-0.22.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6987d753227f55cf75ba29a8dab69d1d83308ce483d7a8c6d223086f7a42e125"}, + {file = "dulwich-0.22.8-cp311-cp311-win32.whl", hash = "sha256:7757b4a2aad64c6f1920082fc1fccf4da25c3923a0ae7b242c08d06861dae6e1"}, + {file = "dulwich-0.22.8-cp311-cp311-win_amd64.whl", hash = "sha256:12b243b7e912011c7225dc67480c313ac8d2990744789b876016fb593f6f3e19"}, + {file = "dulwich-0.22.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d81697f74f50f008bb221ab5045595f8a3b87c0de2c86aa55be42ba97421f3cd"}, + {file = "dulwich-0.22.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bff1da8e2e6a607c3cb45f5c2e652739589fe891245e1d5b770330cdecbde41"}, + {file = "dulwich-0.22.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9969099e15b939d3936f8bee8459eaef7ef5a86cd6173393a17fe28ca3d38aff"}, + {file = "dulwich-0.22.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:017152c51b9a613f0698db28c67cf3e0a89392d28050dbf4f4ac3f657ea4c0dc"}, 
+ {file = "dulwich-0.22.8-cp312-cp312-win32.whl", hash = "sha256:ee70e8bb8798b503f81b53f7a103cb869c8e89141db9005909f79ab1506e26e9"}, + {file = "dulwich-0.22.8-cp312-cp312-win_amd64.whl", hash = "sha256:dc89c6f14dcdcbfee200b0557c59ae243835e42720be143526d834d0e53ed3af"}, + {file = "dulwich-0.22.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbade3342376be1cd2409539fe1b901d2d57a531106bbae204da921ef4456a74"}, + {file = "dulwich-0.22.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71420ffb6deebc59b2ce875e63d814509f9c1dc89c76db962d547aebf15670c7"}, + {file = "dulwich-0.22.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a626adbfac44646a125618266a24133763bdc992bf8bd0702910d67e6b994443"}, + {file = "dulwich-0.22.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f1476c9c4e4ede95714d06c4831883a26680e37b040b8b6230f506e5ba39f51"}, + {file = "dulwich-0.22.8-cp313-cp313-win32.whl", hash = "sha256:b2b31913932bb5bd41658dd398b33b1a2d4d34825123ad54e40912cfdfe60003"}, + {file = "dulwich-0.22.8-cp313-cp313-win_amd64.whl", hash = "sha256:7a44e5a61a7989aca1e301d39cfb62ad2f8853368682f524d6e878b4115d823d"}, + {file = "dulwich-0.22.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9cd0c67fb44a38358b9fcabee948bf11044ef6ce7a129e50962f54c176d084e"}, + {file = "dulwich-0.22.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b79b94726c3f4a9e5a830c649376fd0963236e73142a4290bac6bc9fc9cb120"}, + {file = "dulwich-0.22.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16bbe483d663944972e22d64e1f191201123c3b5580fbdaac6a4f66bfaa4fc11"}, + {file = "dulwich-0.22.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e02d403af23d93dc1f96eb2408e25efd50046e38590a88c86fa4002adc9849b0"}, + {file = "dulwich-0.22.8-cp39-cp39-win32.whl", hash = 
"sha256:8bdd9543a77fb01be704377f5e634b71f955fec64caa4a493dc3bfb98e3a986e"}, + {file = "dulwich-0.22.8-cp39-cp39-win_amd64.whl", hash = "sha256:3b6757c6b3ba98212b854a766a4157b9cb79a06f4e1b06b46dec4bd834945b8e"}, + {file = "dulwich-0.22.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7bb18fa09daa1586c1040b3e2777d38d4212a5cdbe47d384ba66a1ac336fcc4c"}, + {file = "dulwich-0.22.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2fda8e87907ed304d4a5962aea0338366144df0df60f950b8f7f125871707f"}, + {file = "dulwich-0.22.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1748cd573a0aee4d530bc223a23ccb8bb5b319645931a37bd1cfb68933b720c1"}, + {file = "dulwich-0.22.8-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a631b2309feb9a9631eabd896612ba36532e3ffedccace57f183bb868d7afc06"}, + {file = "dulwich-0.22.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:00e7d9a3d324f9e0a1b27880eec0e8e276ff76519621b66c1a429ca9eb3f5a8d"}, + {file = "dulwich-0.22.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f8aa3de93201f9e3e40198725389aa9554a4ee3318a865f96a8e9bc9080f0b25"}, + {file = "dulwich-0.22.8-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e8da9dd8135884975f5be0563ede02179240250e11f11942801ae31ac293f37"}, + {file = "dulwich-0.22.8-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fc5ce2435fb3abdf76f1acabe48f2e4b3f7428232cadaef9daaf50ea7fa30ee"}, + {file = "dulwich-0.22.8-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:982b21cc3100d959232cadb3da0a478bd549814dd937104ea50f43694ec27153"}, + {file = "dulwich-0.22.8-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6bde2b13a05cc0ec2ecd4597a99896663544c40af1466121f4d046119b874ce3"}, + {file = 
"dulwich-0.22.8-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6d446cb7d272a151934ad4b48ba691f32486d5267cf2de04ee3b5e05fc865326"}, + {file = "dulwich-0.22.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f6338e6cf95cd76a0191b3637dc3caed1f988ae84d8e75f876d5cd75a8dd81a"}, + {file = "dulwich-0.22.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e004fc532ea262f2d5f375068101ca4792becb9d4aa663b050f5ac31fda0bb5c"}, + {file = "dulwich-0.22.8-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bfdbc6fa477dee00d04e22d43a51571cd820cfaaaa886f0f155b8e29b3e3d45"}, + {file = "dulwich-0.22.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ae900c8e573f79d714c1d22b02cdadd50b64286dd7203028f0200f82089e4950"}, + {file = "dulwich-0.22.8-py3-none-any.whl", hash = "sha256:ffc7a02e62b72884de58baaa3b898b7f6427893e79b1289ffa075092efe59181"}, + {file = "dulwich-0.22.8.tar.gz", hash = "sha256:701547310415de300269331abe29cb5717aa1ea377af826bf513d0adfb1c209b"}, +] + +[package.dependencies] +urllib3 = ">=1.25" + +[package.extras] +dev = ["mypy (==1.15.0)", "ruff (==0.9.7)"] +fastimport = ["fastimport"] +https = ["urllib3 (>=1.24.1)"] +paramiko = ["paramiko"] +pgp = ["gpg"] + +[[package]] +name = "et-xmlfile" +version = "2.0.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa"}, + {file = "et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54"}, +] + +[[package]] +name = "eval-type-backport" +version = "0.2.2" +description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}, + {file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastjsonschema" +version = "2.21.1" +description = "Fastest Python implementation of JSON schema" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, + {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, +] + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "filelock" +version = "3.18.0" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] +typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] + +[[package]] +name = "findpython" +version = "0.6.3" +description = "A utility to find python versions on your system" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "findpython-0.6.3-py3-none-any.whl", hash = "sha256:a85bb589b559cdf1b87227cc233736eb7cad894b9e68021ee498850611939ebc"}, + {file = "findpython-0.6.3.tar.gz", hash = "sha256:5863ea55556d8aadc693481a14ac4f3624952719efc1c5591abb0b4a9e965c94"}, +] + +[package.dependencies] +packaging = ">=20" + +[[package]] +name = "firecrawl" +version = "1.9.0" +description = "Python SDK for Firecrawl API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "firecrawl-1.9.0-py3-none-any.whl", hash = "sha256:9c0931242048ddd86e85631db439011fa0545cbcca21f7c41c89d13116bb2187"}, + {file = "firecrawl-1.9.0.tar.gz", hash = "sha256:8e82d3b288d57bf7c93b9118fb226a84b7c8b84699861e1d36b4791dc87a7435"}, +] + +[package.dependencies] +nest-asyncio = "*" +pydantic = ">=2.10.3" +python-dotenv = "*" +requests = "*" +websockets = "*" + +[[package]] +name = "flask" +version = "3.1.0" +description = "A simple framework for building complex web applications." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "flask-3.1.0-py3-none-any.whl", hash = "sha256:d667207822eb83f1c4b50949b1623c8fc8d51f2341d65f72e1a1815397551136"}, + {file = "flask-3.1.0.tar.gz", hash = "sha256:5f873c5184c897c8d9d1b05df1e3d01b14910ce69607a117bd3277098a5836ac"}, +] + +[package.dependencies] +blinker = ">=1.9" +click = ">=8.1.3" +itsdangerous = ">=2.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.1" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "frozenlist" +version = "1.5.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, + {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, + {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, + {file = 
"frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, + {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, + {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, + {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, + {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, + {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, + {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, + {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, + {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"}, + {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"}, + {file = 
"frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"}, + {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"}, + {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"}, + {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"}, + {file = 
"frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"}, + {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"}, + {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"}, + {file = 
"frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"}, + {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"}, + {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"}, + {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"}, + {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"}, + {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"}, + {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"}, + {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, + {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, + {file = 
"frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, + {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, + {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, + {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, + {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, + {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, + {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, +] + +[[package]] +name = "griffe" +version = "1.7.3" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "griffe-1.7.3-py3-none-any.whl", hash = "sha256:c6b3ee30c2f0f17f30bcdef5068d6ab7a2a4f1b8bf1a3e74b56fffd21e1c5f75"}, + {file = "griffe-1.7.3.tar.gz", hash = "sha256:52ee893c6a3a968b639ace8015bec9d36594961e156e23315c8e8e51401fa50b"}, +] + +[package.dependencies] +colorama = ">=0.4" + +[[package]] +name = "grpcio" +version = "1.71.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, + {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, + {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, + {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, + {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, + {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, + {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, + {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, + {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, + {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, + 
{file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, + {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, + {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, + {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, + {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, + {file = 
"grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, + {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, + {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, + {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, + {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, + {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, + {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, + {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.71.0)"] + +[[package]] +name = "grpcio-tools" +version = "1.71.0" +description = "Protobuf code generator for gRPC" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio_tools-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:f4ad7f0d756546902597053d70b3af2606fbd70d7972876cd75c1e241d22ae00"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:64bdb291df61cf570b5256777ad5fe2b1db6d67bc46e55dc56a0a862722ae329"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:8dd9795e982d77a4b496f7278b943c2563d9afde2069cdee78c111a40cc4d675"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1b5860c41a36b26fec4f52998f1a451d0525a5c9a4fb06b6ea3e9211abdb925"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3059c14035e5dc03d462f261e5900b9a077fd1a36976c3865b8507474520bad4"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f360981b215b1d5aff9235b37e7e1826246e35bbac32a53e41d4e990a37b8f4c"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bfe3888c3bbe16a5aa39409bc38744a31c0c3d2daa2b0095978c56e106c85b42"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:145985c0bf12131f0a1503e65763e0f060473f7f3928ed1ff3fb0e8aad5bc8ac"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-win32.whl", hash = "sha256:82c430edd939bb863550ee0fecf067d78feff828908a1b529bbe33cc57f2419c"}, + {file = "grpcio_tools-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:83e90724e3f02415c628e4ead1d6ffe063820aaaa078d9a39176793df958cd5a"}, + {file = 
"grpcio_tools-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:1f19b16b49afa5d21473f49c0966dd430c88d089cd52ac02404d8cef67134efb"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:459c8f5e00e390aecd5b89de67deb3ec7188a274bc6cb50e43cef35ab3a3f45d"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:edab7e6518de01196be37f96cb1e138c3819986bf5e2a6c9e1519b4d716b2f5a"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b93b9f6adc7491d4c10144c0643409db298e5e63c997106a804f6f0248dbaf4"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ae5f2efa9e644c10bf1021600bfc099dfbd8e02b184d2d25dc31fcd6c2bc59e"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:65aa082f4435571d65d5ce07fc444f23c3eff4f3e34abef599ef8c9e1f6f360f"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1331e726e08b7bdcbf2075fcf4b47dff07842b04845e6e220a08a4663e232d7f"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6693a7d3ba138b0e693b3d1f687cdd9db9e68976c3fa2b951c17a072fea8b583"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-win32.whl", hash = "sha256:6d11ed3ff7b6023b5c72a8654975324bb98c1092426ba5b481af406ff559df00"}, + {file = "grpcio_tools-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:072b2a5805ac97e4623b3aa8f7818275f3fb087f4aa131b0fce00471065f6eaa"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:61c0409d5bdac57a7bd0ce0ab01c1c916728fe4c8a03d77a25135ad481eb505c"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:28784f39921d061d2164a9dcda5164a69d07bf29f91f0ea50b505958292312c9"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:192808cf553cedca73f0479cc61d5684ad61f24db7a5f3c4dfe1500342425866"}, + {file = 
"grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:989ee9da61098230d3d4c8f8f8e27c2de796f1ff21b1c90110e636d9acd9432b"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:541a756276c8a55dec991f6c0106ae20c8c8f5ce8d0bdbfcb01e2338d1a8192b"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:870c0097700d13c403e5517cb7750ab5b4a791ce3e71791c411a38c5468b64bd"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:abd57f615e88bf93c3c6fd31f923106e3beb12f8cd2df95b0d256fa07a7a0a57"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:753270e2d06d37e6d7af8967d1d059ec635ad215882041a36294f4e2fd502b2e"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-win32.whl", hash = "sha256:0e647794bd7138b8c215e86277a9711a95cf6a03ff6f9e555d54fdf7378b9f9d"}, + {file = "grpcio_tools-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:48debc879570972d28bfe98e4970eff25bb26da3f383e0e49829b2d2cd35ad87"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:9a78d07d6c301a25ef5ede962920a522556a1dfee1ccc05795994ceb867f766c"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:580ac88141c9815557e63c9c04f5b1cdb19b4db8d0cb792b573354bde1ee8b12"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f7c678e68ece0ae908ecae1c4314a0c2c7f83e26e281738b9609860cc2c82d96"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56ecd6cc89b5e5eed1de5eb9cafce86c9c9043ee3840888cc464d16200290b53"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a041afc20ab2431d756b6295d727bd7adee813b21b06a3483f4a7a15ea15f"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:2a1712f12102b60c8d92779b89d0504e0d6f3a59f2b933e5622b8583f5c02992"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:41878cb7a75477e62fdd45e7e9155b3af1b7a5332844021e2511deaf99ac9e6c"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:682e958b476049ccc14c71bedf3f979bced01f6e0c04852efc5887841a32ad6b"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-win32.whl", hash = "sha256:0ccfb837152b7b858b9f26bb110b3ae8c46675d56130f6c2f03605c4f129be13"}, + {file = "grpcio_tools-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:ffff9bc5eacb34dd26b487194f7d44a3e64e752fc2cf049d798021bf25053b87"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:834959b6eceb85de5217a411aba1643b5f782798680c122202d6a06177226644"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:e3ae9556e2a1cd70e7d7b0e0459c35af71d51a7dae4cf36075068011a69f13ec"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:77fe6db1334e0ce318b2cb4e70afa94e0c173ed1a533d37aea69ad9f61ae8ea9"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57e3e2544c306b60ef2d76570bac4e977be1ad548641c9eec130c3bc47e80141"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af39e245fa56f7f5c2fe86b7d6c1b78f395c07e54d5613cbdbb3c24769a92b6e"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f987d0053351217954543b174b0bddbf51d45b3cfcf8d6de97b0a43d264d753"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8e6cdbba4dae7b37b0d25d074614be9936fb720144420f03d9f142a80be69ba2"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3adc8b229e60c77bab5a5d62b415667133bd5ced7d59b5f71d6317c9143631e"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-win32.whl", hash = 
"sha256:f68334d28a267fabec6e70cb5986e9999cfbfd14db654094ddf9aedd804a293a"}, + {file = "grpcio_tools-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:1291a6136c07a86c3bb09f6c33f5cf227cc14956edd1b85cb572327a36e0aef8"}, + {file = "grpcio_tools-1.71.0.tar.gz", hash = "sha256:38dba8e0d5e0fb23a034e09644fdc6ed862be2371887eee54901999e8f6792a8"}, +] + +[package.dependencies] +grpcio = ">=1.71.0" +protobuf = ">=5.26.1,<6.0dev" +setuptools = "*" + +[[package]] +name = "gunicorn" +version = "23.0.0" +description = "WSGI HTTP Server for UNIX" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, + {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +eventlet = ["eventlet (>=0.24.1,!=0.36.0)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "h2" +version = "4.2.0" +description = "Pure-Python HTTP/2 protocol implementation" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0"}, + {file = "h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f"}, +] + 
+[package.dependencies] +hpack = ">=4.1,<5" +hyperframe = ">=6.1,<7" + +[[package]] +name = "hpack" +version = "4.1.0" +description = "Pure-Python HPACK header encoding" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496"}, + {file = "hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca"}, +] + +[[package]] +name = "httpcore" +version = "1.0.7" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "httpx-sse" +version = "0.4.0" +description = "Consume Server-Sent Event (SSE) messages with HTTPX." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, + {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, +] + +[[package]] +name = "hypercorn" +version = "0.17.3" +description = "A ASGI Server based on Hyper libraries and inspired by Gunicorn" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "hypercorn-0.17.3-py3-none-any.whl", hash = "sha256:059215dec34537f9d40a69258d323f56344805efb462959e727152b0aa504547"}, + {file = "hypercorn-0.17.3.tar.gz", hash = "sha256:1b37802ee3ac52d2d85270700d565787ab16cf19e1462ccfa9f089ca17574165"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +h11 = "*" +h2 = ">=3.1.0" +priority = "*" +taskgroup = {version = "*", markers = "python_version < \"3.11\""} +tomli = {version = "*", markers = "python_version < \"3.11\""} 
+typing_extensions = {version = "*", markers = "python_version < \"3.11\""} +wsproto = ">=0.14.0" + +[package.extras] +docs = ["pydata_sphinx_theme", "sphinxcontrib_mermaid"] +h3 = ["aioquic (>=0.9.0,<1.0)"] +trio = ["trio (>=0.22.0)"] +uvloop = ["uvloop (>=0.18) ; platform_system != \"Windows\""] + +[[package]] +name = "hyperframe" +version = "6.1.0" +description = "Pure-Python HTTP/2 framing" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5"}, + {file = "hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08"}, +] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "importlib-metadata" +version = "8.6.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.10\" or python_version == \"3.11\"" +files = [ + {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, + {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", 
"jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = "installer" +version = "0.7.0" +description = "A library for installing Python wheels." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "installer-0.7.0-py3-none-any.whl", hash = "sha256:05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53"}, + {file = "installer-0.7.0.tar.gz", hash = "sha256:a26d3e3116289bb08216e0d0f7d925fcef0b0194eedfa0c944bcaaa106c4b631"}, +] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + +[[package]] +name = "jaraco-classes" +version = "3.4.0" +description = "Utility functions for Python class constructs" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, + {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", 
"pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jaraco-context" +version = "6.0.1" +description = "Useful decorators and context managers" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4"}, + {file = "jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3"}, +] + +[package.dependencies] +"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] + +[[package]] +name = "jaraco-functools" +version = "4.1.0" +description = "Functools like those found in stdlib" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649"}, + {file = "jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"] +type = ["pytest-mypy"] + +[[package]] +name = "jeepney" +version = "0.9.0" +description = "Low-level, pure Python DBus protocol wrapper." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "sys_platform == \"linux\"" +files = [ + {file = "jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683"}, + {file = "jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732"}, +] + +[package.extras] +test = ["async-timeout ; python_version < \"3.11\"", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] +trio = ["trio"] + +[[package]] +name = "jinja2" +version = "3.1.5" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jiter" +version = "0.6.1" +description = "Fast iterable JSON parser." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "jiter-0.6.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d08510593cb57296851080018006dfc394070178d238b767b1879dc1013b106c"}, + {file = "jiter-0.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adef59d5e2394ebbad13b7ed5e0306cceb1df92e2de688824232a91588e77aa7"}, + {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3e02f7a27f2bcc15b7d455c9df05df8ffffcc596a2a541eeda9a3110326e7a3"}, + {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed69a7971d67b08f152c17c638f0e8c2aa207e9dd3a5fcd3cba294d39b5a8d2d"}, + {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2019d966e98f7c6df24b3b8363998575f47d26471bfb14aade37630fae836a1"}, + {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36c0b51a285b68311e207a76c385650322734c8717d16c2eb8af75c9d69506e7"}, + {file = "jiter-0.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:220e0963b4fb507c525c8f58cde3da6b1be0bfddb7ffd6798fb8f2531226cdb1"}, + {file = "jiter-0.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa25c7a9bf7875a141182b9c95aed487add635da01942ef7ca726e42a0c09058"}, + {file = "jiter-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e90552109ca8ccd07f47ca99c8a1509ced93920d271bb81780a973279974c5ab"}, + {file = "jiter-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:67723a011964971864e0b484b0ecfee6a14de1533cff7ffd71189e92103b38a8"}, + {file = "jiter-0.6.1-cp310-none-win32.whl", hash = "sha256:33af2b7d2bf310fdfec2da0177eab2fedab8679d1538d5b86a633ebfbbac4edd"}, + {file = "jiter-0.6.1-cp310-none-win_amd64.whl", hash = "sha256:7cea41c4c673353799906d940eee8f2d8fd1d9561d734aa921ae0f75cb9732f4"}, + {file = "jiter-0.6.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:b03c24e7da7e75b170c7b2b172d9c5e463aa4b5c95696a368d52c295b3f6847f"}, + {file = "jiter-0.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:47fee1be677b25d0ef79d687e238dc6ac91a8e553e1a68d0839f38c69e0ee491"}, + {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25f0d2f6e01a8a0fb0eab6d0e469058dab2be46ff3139ed2d1543475b5a1d8e7"}, + {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b809e39e342c346df454b29bfcc7bca3d957f5d7b60e33dae42b0e5ec13e027"}, + {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e9ac7c2f092f231f5620bef23ce2e530bd218fc046098747cc390b21b8738a7a"}, + {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e51a2d80d5fe0ffb10ed2c82b6004458be4a3f2b9c7d09ed85baa2fbf033f54b"}, + {file = "jiter-0.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3343d4706a2b7140e8bd49b6c8b0a82abf9194b3f0f5925a78fc69359f8fc33c"}, + {file = "jiter-0.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82521000d18c71e41c96960cb36e915a357bc83d63a8bed63154b89d95d05ad1"}, + {file = "jiter-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c843e7c1633470708a3987e8ce617ee2979ee18542d6eb25ae92861af3f1d62"}, + {file = "jiter-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a2e861658c3fe849efc39b06ebb98d042e4a4c51a8d7d1c3ddc3b1ea091d0784"}, + {file = "jiter-0.6.1-cp311-none-win32.whl", hash = "sha256:7d72fc86474862c9c6d1f87b921b70c362f2b7e8b2e3c798bb7d58e419a6bc0f"}, + {file = "jiter-0.6.1-cp311-none-win_amd64.whl", hash = "sha256:3e36a320634f33a07794bb15b8da995dccb94f944d298c8cfe2bd99b1b8a574a"}, + {file = "jiter-0.6.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1fad93654d5a7dcce0809aff66e883c98e2618b86656aeb2129db2cd6f26f867"}, + {file = "jiter-0.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:4e6e340e8cd92edab7f6a3a904dbbc8137e7f4b347c49a27da9814015cc0420c"}, + {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:691352e5653af84ed71763c3c427cff05e4d658c508172e01e9c956dfe004aba"}, + {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:defee3949313c1f5b55e18be45089970cdb936eb2a0063f5020c4185db1b63c9"}, + {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26d2bdd5da097e624081c6b5d416d3ee73e5b13f1703bcdadbb1881f0caa1933"}, + {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18aa9d1626b61c0734b973ed7088f8a3d690d0b7f5384a5270cd04f4d9f26c86"}, + {file = "jiter-0.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a3567c8228afa5ddcce950631c6b17397ed178003dc9ee7e567c4c4dcae9fa0"}, + {file = "jiter-0.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5c0507131c922defe3f04c527d6838932fcdfd69facebafd7d3574fa3395314"}, + {file = "jiter-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:540fcb224d7dc1bcf82f90f2ffb652df96f2851c031adca3c8741cb91877143b"}, + {file = "jiter-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e7b75436d4fa2032b2530ad989e4cb0ca74c655975e3ff49f91a1a3d7f4e1df2"}, + {file = "jiter-0.6.1-cp312-none-win32.whl", hash = "sha256:883d2ced7c21bf06874fdeecab15014c1c6d82216765ca6deef08e335fa719e0"}, + {file = "jiter-0.6.1-cp312-none-win_amd64.whl", hash = "sha256:91e63273563401aadc6c52cca64a7921c50b29372441adc104127b910e98a5b6"}, + {file = "jiter-0.6.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:852508a54fe3228432e56019da8b69208ea622a3069458252f725d634e955b31"}, + {file = "jiter-0.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f491cc69ff44e5a1e8bc6bf2b94c1f98d179e1aaf4a554493c171a5b2316b701"}, + {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:cc56c8f0b2a28ad4d8047f3ae62d25d0e9ae01b99940ec0283263a04724de1f3"}, + {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51b58f7a0d9e084a43b28b23da2b09fc5e8df6aa2b6a27de43f991293cab85fd"}, + {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f79ce15099154c90ef900d69c6b4c686b64dfe23b0114e0971f2fecd306ec6c"}, + {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:03a025b52009f47e53ea619175d17e4ded7c035c6fbd44935cb3ada11e1fd592"}, + {file = "jiter-0.6.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c74a8d93718137c021d9295248a87c2f9fdc0dcafead12d2930bc459ad40f885"}, + {file = "jiter-0.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40b03b75f903975f68199fc4ec73d546150919cb7e534f3b51e727c4d6ccca5a"}, + {file = "jiter-0.6.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:825651a3f04cf92a661d22cad61fc913400e33aa89b3e3ad9a6aa9dc8a1f5a71"}, + {file = "jiter-0.6.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:928bf25eb69ddb292ab8177fe69d3fbf76c7feab5fce1c09265a7dccf25d3991"}, + {file = "jiter-0.6.1-cp313-none-win32.whl", hash = "sha256:352cd24121e80d3d053fab1cc9806258cad27c53cad99b7a3cac57cf934b12e4"}, + {file = "jiter-0.6.1-cp313-none-win_amd64.whl", hash = "sha256:be7503dd6f4bf02c2a9bacb5cc9335bc59132e7eee9d3e931b13d76fd80d7fda"}, + {file = "jiter-0.6.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:31d8e00e1fb4c277df8ab6f31a671f509ebc791a80e5c61fdc6bc8696aaa297c"}, + {file = "jiter-0.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77c296d65003cd7ee5d7b0965f6acbe6cffaf9d1fa420ea751f60ef24e85fed5"}, + {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeeb0c0325ef96c12a48ea7e23e2e86fe4838e6e0a995f464cf4c79fa791ceeb"}, + {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash 
= "sha256:a31c6fcbe7d6c25d6f1cc6bb1cba576251d32795d09c09961174fe461a1fb5bd"}, + {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59e2b37f3b9401fc9e619f4d4badcab2e8643a721838bcf695c2318a0475ae42"}, + {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bae5ae4853cb9644144e9d0755854ce5108d470d31541d83f70ca7ecdc2d1637"}, + {file = "jiter-0.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9df588e9c830b72d8db1dd7d0175af6706b0904f682ea9b1ca8b46028e54d6e9"}, + {file = "jiter-0.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15f8395e835cf561c85c1adee72d899abf2733d9df72e9798e6d667c9b5c1f30"}, + {file = "jiter-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a99d4e0b5fc3b05ea732d67eb2092fe894e95a90e6e413f2ea91387e228a307"}, + {file = "jiter-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a311df1fa6be0ccd64c12abcd85458383d96e542531bafbfc0a16ff6feda588f"}, + {file = "jiter-0.6.1-cp38-none-win32.whl", hash = "sha256:81116a6c272a11347b199f0e16b6bd63f4c9d9b52bc108991397dd80d3c78aba"}, + {file = "jiter-0.6.1-cp38-none-win_amd64.whl", hash = "sha256:13f9084e3e871a7c0b6e710db54444088b1dd9fbefa54d449b630d5e73bb95d0"}, + {file = "jiter-0.6.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:f1c53615fcfec3b11527c08d19cff6bc870da567ce4e57676c059a3102d3a082"}, + {file = "jiter-0.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f791b6a4da23238c17a81f44f5b55d08a420c5692c1fda84e301a4b036744eb1"}, + {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c97e90fec2da1d5f68ef121444c2c4fa72eabf3240829ad95cf6bbeca42a301"}, + {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3cbc1a66b4e41511209e97a2866898733c0110b7245791ac604117b7fb3fedb7"}, + {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e4e85f9e12cd8418ab10e1fcf0e335ae5bb3da26c4d13a0fd9e6a17a674783b6"}, + {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08be33db6dcc374c9cc19d3633af5e47961a7b10d4c61710bd39e48d52a35824"}, + {file = "jiter-0.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:677be9550004f5e010d673d3b2a2b815a8ea07a71484a57d3f85dde7f14cf132"}, + {file = "jiter-0.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8bd065be46c2eecc328e419d6557bbc37844c88bb07b7a8d2d6c91c7c4dedc9"}, + {file = "jiter-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bd95375ce3609ec079a97c5d165afdd25693302c071ca60c7ae1cf826eb32022"}, + {file = "jiter-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db459ed22d0208940d87f614e1f0ea5a946d29a3cfef71f7e1aab59b6c6b2afb"}, + {file = "jiter-0.6.1-cp39-none-win32.whl", hash = "sha256:d71c962f0971347bd552940ab96aa42ceefcd51b88c4ced8a27398182efa8d80"}, + {file = "jiter-0.6.1-cp39-none-win_amd64.whl", hash = "sha256:d465db62d2d10b489b7e7a33027c4ae3a64374425d757e963f86df5b5f2e7fc5"}, + {file = "jiter-0.6.1.tar.gz", hash = "sha256:e19cd21221fc139fb032e4112986656cb2739e9fe6d84c13956ab30ccc7d4449"}, +] + +[[package]] +name = "jsonpath-python" +version = "1.0.6" +description = "A more powerful JSONPath implementation in modern python" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "jsonpath-python-1.0.6.tar.gz", hash = "sha256:dd5be4a72d8a2995c3f583cf82bf3cd1a9544cfdabf2d22595b67aff07349666"}, + {file = "jsonpath_python-1.0.6-py3-none-any.whl", hash = "sha256:1e3b78df579f5efc23565293612decee04214609208a2335884b3ee3f786b575"}, +] + +[[package]] +name = "keyring" +version = "25.6.0" +description = "Store and access your passwords safely." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd"}, + {file = "keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66"}, +] + +[package.dependencies] +importlib_metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} +"jaraco.classes" = "*" +"jaraco.context" = "*" +"jaraco.functools" = "*" +jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} +pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} +SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +completion = ["shtab (>=1.1.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["pyfakefs", "pytest (>=6,!=8.1.*)"] +type = ["pygobject-stubs", "pytest-mypy", "shtab", "types-pywin32"] + +[[package]] +name = "lxml" +version = "5.3.0" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = 
"lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = 
"lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = 
"lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = 
"lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = 
"lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = 
"lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.11)"] + +[[package]] +name = "markdownify" +version = "0.13.1" +description = "Convert HTML to markdown." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "markdownify-0.13.1-py3-none-any.whl", hash = "sha256:1d181d43d20902bcc69d7be85b5316ed174d0dda72ff56e14ae4c95a4a407d22"}, + {file = "markdownify-0.13.1.tar.gz", hash = "sha256:ab257f9e6bd4075118828a28c9d02f8a4bfeb7421f558834aa79b2dfeb32a098"}, +] + +[package.dependencies] +beautifulsoup4 = ">=4.9,<5" +six = ">=1.15,<2" + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = 
"sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = 
"sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "mcp" +version = "1.6.0" +description = "Model Context Protocol SDK" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "mcp-1.6.0-py3-none-any.whl", hash = "sha256:7bd24c6ea042dbec44c754f100984d186620d8b841ec30f1b19eda9b93a634d0"}, + {file = "mcp-1.6.0.tar.gz", hash = "sha256:d9324876de2c5637369f43161cd71eebfd803df5a95e46225cab8d280e366723"}, +] + +[package.dependencies] +anyio = ">=4.5" +httpx = ">=0.27" +httpx-sse = ">=0.4" +pydantic = ">=2.7.2,<3.0.0" +pydantic-settings = ">=2.5.2" +sse-starlette = ">=1.6.1" +starlette = ">=0.27" +uvicorn = ">=0.23.1" + +[package.extras] +cli = ["python-dotenv (>=1.0.0)", "typer (>=0.12.4)"] +rich = ["rich (>=13.9.4)"] +ws = ["websockets (>=15.0.1)"] + +[[package]] +name = "more-itertools" +version = "10.6.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "more-itertools-10.6.0.tar.gz", hash = "sha256:2cd7fad1009c31cc9fb6a035108509e6547547a7a738374f10bd49a09eb3ee3b"}, + {file = "more_itertools-10.6.0-py3-none-any.whl", hash = "sha256:6eb054cb4b6db1473f6e15fcc676a08e4732548acd47c708f0e179c2c7c01e89"}, +] + +[[package]] +name = "motor" +version = "3.7.0" +description = "Non-blocking MongoDB driver for Tornado or asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "motor-3.7.0-py3-none-any.whl", hash = "sha256:61bdf1afded179f008d423f98066348157686f25a90776ea155db5f47f57d605"}, + {file = "motor-3.7.0.tar.gz", hash = "sha256:0dfa1f12c812bd90819c519b78bed626b5a9dbb29bba079ccff2bfa8627e0fec"}, 
+] + +[package.dependencies] +pymongo = ">=4.9,<5.0" + +[package.extras] +aws = ["pymongo[aws] (>=4.5,<5)"] +docs = ["aiohttp", "furo (==2024.8.6)", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<8)", "sphinx-rtd-theme (>=2,<3)", "tornado"] +encryption = ["pymongo[encryption] (>=4.5,<5)"] +gssapi = ["pymongo[gssapi] (>=4.5,<5)"] +ocsp = ["pymongo[ocsp] (>=4.5,<5)"] +snappy = ["pymongo[snappy] (>=4.5,<5)"] +test = ["aiohttp (>=3.8.7)", "cffi (>=1.17.0rc1) ; python_version == \"3.13\"", "mockupdb", "pymongo[encryption] (>=4.5,<5)", "pytest (>=7)", "pytest-asyncio", "tornado (>=5)"] +zstd = ["pymongo[zstd] (>=4.5,<5)"] + +[[package]] +name = "msgpack" +version = "1.1.0" +description = "MessagePack serializer" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, + {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, + {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, + {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = 
"sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, + {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, + {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, + {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, + {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, + {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, + {file = 
"msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, + {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, + {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, + 
{file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, + {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, + {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, + {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, +] + +[[package]] +name = "multidict" +version = "6.2.0" +description = "multidict implementation" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b9f6392d98c0bd70676ae41474e2eecf4c7150cb419237a41f8f96043fcb81d1"}, + {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3501621d5e86f1a88521ea65d5cad0a0834c77b26f193747615b7c911e5422d2"}, + {file = "multidict-6.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32ed748ff9ac682eae7859790d3044b50e3076c7d80e17a44239683769ff485e"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc826b9a8176e686b67aa60fd6c6a7047b0461cae5591ea1dc73d28f72332a8a"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:214207dcc7a6221d9942f23797fe89144128a71c03632bf713d918db99bd36de"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05fefbc3cddc4e36da209a5e49f1094bbece9a581faa7f3589201fd95df40e5d"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e851e6363d0dbe515d8de81fd544a2c956fdec6f8a049739562286727d4a00c3"}, + {file = 
"multidict-6.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32c9b4878f48be3e75808ea7e499d6223b1eea6d54c487a66bc10a1871e3dc6a"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7243c5a6523c5cfeca76e063efa5f6a656d1d74c8b1fc64b2cd1e84e507f7e2a"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0e5a644e50ef9fb87878d4d57907f03a12410d2aa3b93b3acdf90a741df52c49"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0dc25a3293c50744796e87048de5e68996104d86d940bb24bc3ec31df281b191"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a49994481b99cd7dedde07f2e7e93b1d86c01c0fca1c32aded18f10695ae17eb"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:641cf2e3447c9ecff2f7aa6e9eee9eaa286ea65d57b014543a4911ff2799d08a"}, + {file = "multidict-6.2.0-cp310-cp310-win32.whl", hash = "sha256:0c383d28857f66f5aebe3e91d6cf498da73af75fbd51cedbe1adfb85e90c0460"}, + {file = "multidict-6.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:a33273a541f1e1a8219b2a4ed2de355848ecc0254264915b9290c8d2de1c74e1"}, + {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:84e87a7d75fa36839a3a432286d719975362d230c70ebfa0948549cc38bd5b46"}, + {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8de4d42dffd5ced9117af2ce66ba8722402541a3aa98ffdf78dde92badb68932"}, + {file = "multidict-6.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d91a230c7f8af86c904a5a992b8c064b66330544693fd6759c3d6162382ecf"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f6cad071960ba1914fa231677d21b1b4a3acdcce463cee41ea30bc82e6040cf"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f74f2fc51555f4b037ef278efc29a870d327053aba5cb7d86ae572426c7cccc"}, + 
{file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14ed9ed1bfedd72a877807c71113deac292bf485159a29025dfdc524c326f3e1"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac3fcf9a2d369bd075b2c2965544036a27ccd277fc3c04f708338cc57533081"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fc6af8e39f7496047c7876314f4317736eac82bf85b54c7c76cf1a6f8e35d98"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f8cb1329f42fadfb40d6211e5ff568d71ab49be36e759345f91c69d1033d633"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5389445f0173c197f4a3613713b5fb3f3879df1ded2a1a2e4bc4b5b9c5441b7e"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94a7bb972178a8bfc4055db80c51efd24baefaced5e51c59b0d598a004e8305d"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da51d8928ad8b4244926fe862ba1795f0b6e68ed8c42cd2f822d435db9c2a8f4"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:063be88bd684782a0715641de853e1e58a2f25b76388538bd62d974777ce9bc2"}, + {file = "multidict-6.2.0-cp311-cp311-win32.whl", hash = "sha256:52b05e21ff05729fbea9bc20b3a791c3c11da61649ff64cce8257c82a020466d"}, + {file = "multidict-6.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1e2a2193d3aa5cbf5758f6d5680a52aa848e0cf611da324f71e5e48a9695cc86"}, + {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:437c33561edb6eb504b5a30203daf81d4a9b727e167e78b0854d9a4e18e8950b"}, + {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9f49585f4abadd2283034fc605961f40c638635bc60f5162276fec075f2e37a4"}, + {file = "multidict-6.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:5dd7106d064d05896ce28c97da3f46caa442fe5a43bc26dfb258e90853b39b44"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b11a0417475f093d0f0809a149aff3943c2c56da50fdf2c3c88d57fe3dfbd"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac380cacdd3b183338ba63a144a34e9044520a6fb30c58aa14077157a033c13e"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61d5541f27533f803a941d3a3f8a3d10ed48c12cf918f557efcbf3cd04ef265c"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:facaf11f21f3a4c51b62931feb13310e6fe3475f85e20d9c9fdce0d2ea561b87"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:095a2eabe8c43041d3e6c2cb8287a257b5f1801c2d6ebd1dd877424f1e89cf29"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0cc398350ef31167e03f3ca7c19313d4e40a662adcb98a88755e4e861170bdd"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7c611345bbe7cb44aabb877cb94b63e86f2d0db03e382667dbd037866d44b4f8"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8cd1a0644ccaf27e9d2f6d9c9474faabee21f0578fe85225cc5af9a61e1653df"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:89b3857652183b8206a891168af47bac10b970d275bba1f6ee46565a758c078d"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:125dd82b40f8c06d08d87b3510beaccb88afac94e9ed4a6f6c71362dc7dbb04b"}, + {file = "multidict-6.2.0-cp312-cp312-win32.whl", hash = "sha256:76b34c12b013d813e6cb325e6bd4f9c984db27758b16085926bbe7ceeaace626"}, + {file = "multidict-6.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:0b183a959fb88ad1be201de2c4bdf52fa8e46e6c185d76201286a97b6f5ee65c"}, + {file = 
"multidict-6.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5c5e7d2e300d5cb3b2693b6d60d3e8c8e7dd4ebe27cd17c9cb57020cac0acb80"}, + {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:256d431fe4583c5f1e0f2e9c4d9c22f3a04ae96009b8cfa096da3a8723db0a16"}, + {file = "multidict-6.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a3c0ff89fe40a152e77b191b83282c9664357dce3004032d42e68c514ceff27e"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7d48207926edbf8b16b336f779c557dd8f5a33035a85db9c4b0febb0706817"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c099d3899b14e1ce52262eb82a5f5cb92157bb5106bf627b618c090a0eadc"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e16e7297f29a544f49340012d6fc08cf14de0ab361c9eb7529f6a57a30cbfda1"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:042028348dc5a1f2be6c666437042a98a5d24cee50380f4c0902215e5ec41844"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08549895e6a799bd551cf276f6e59820aa084f0f90665c0f03dd3a50db5d3c48"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ccfd74957ef53fa7380aaa1c961f523d582cd5e85a620880ffabd407f8202c0"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:83b78c680d4b15d33042d330c2fa31813ca3974197bddb3836a5c635a5fd013f"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b4c153863dd6569f6511845922c53e39c8d61f6e81f228ad5443e690fca403de"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98aa8325c7f47183b45588af9c434533196e241be0a4e4ae2190b06d17675c02"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:9e658d1373c424457ddf6d55ec1db93c280b8579276bebd1f72f113072df8a5d"}, + {file = "multidict-6.2.0-cp313-cp313-win32.whl", hash = "sha256:3157126b028c074951839233647bd0e30df77ef1fedd801b48bdcad242a60f4e"}, + {file = "multidict-6.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:2e87f1926e91855ae61769ba3e3f7315120788c099677e0842e697b0bfb659f2"}, + {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:2529ddbdaa424b2c6c2eb668ea684dd6b75b839d0ad4b21aad60c168269478d7"}, + {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:13551d0e2d7201f0959725a6a769b6f7b9019a168ed96006479c9ac33fe4096b"}, + {file = "multidict-6.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d1996ee1330e245cd3aeda0887b4409e3930524c27642b046e4fae88ffa66c5e"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c537da54ce4ff7c15e78ab1292e5799d0d43a2108e006578a57f531866f64025"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f249badb360b0b4d694307ad40f811f83df4da8cef7b68e429e4eea939e49dd"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48d39b1824b8d6ea7de878ef6226efbe0773f9c64333e1125e0efcfdd18a24c7"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b99aac6bb2c37db336fa03a39b40ed4ef2818bf2dfb9441458165ebe88b793af"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bfa8bc649783e703263f783f73e27fef8cd37baaad4389816cf6a133141331"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2c00ad31fbc2cbac85d7d0fcf90853b2ca2e69d825a2d3f3edb842ef1544a2c"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d57a01a2a9fa00234aace434d8c131f0ac6e0ac6ef131eda5962d7e79edfb5b"}, + {file = 
"multidict-6.2.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:abf5b17bc0cf626a8a497d89ac691308dbd825d2ac372aa990b1ca114e470151"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f7716f7e7138252d88607228ce40be22660d6608d20fd365d596e7ca0738e019"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d5a36953389f35f0a4e88dc796048829a2f467c9197265504593f0e420571547"}, + {file = "multidict-6.2.0-cp313-cp313t-win32.whl", hash = "sha256:e653d36b1bf48fa78c7fcebb5fa679342e025121ace8c87ab05c1cefd33b34fc"}, + {file = "multidict-6.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ca23db5fb195b5ef4fd1f77ce26cadefdf13dba71dab14dadd29b34d457d7c44"}, + {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b4f3d66dd0354b79761481fc15bdafaba0b9d9076f1f42cc9ce10d7fcbda205a"}, + {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e2a2d6749e1ff2c9c76a72c6530d5baa601205b14e441e6d98011000f47a7ac"}, + {file = "multidict-6.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cca83a629f77402cfadd58352e394d79a61c8015f1694b83ab72237ec3941f88"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:781b5dd1db18c9e9eacc419027b0acb5073bdec9de1675c0be25ceb10e2ad133"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf8d370b2fea27fb300825ec3984334f7dd54a581bde6456799ba3776915a656"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25bb96338512e2f46f615a2bb7c6012fe92a4a5ebd353e5020836a7e33120349"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e2819b0b468174de25c0ceed766606a07cedeab132383f1e83b9a4e96ccb4f"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6aed763b6a1b28c46c055692836879328f0b334a6d61572ee4113a5d0c859872"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a1133414b771619aa3c3000701c11b2e4624a7f492f12f256aedde97c28331a2"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:639556758c36093b35e2e368ca485dada6afc2bd6a1b1207d85ea6dfc3deab27"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:163f4604e76639f728d127293d24c3e208b445b463168af3d031b92b0998bb90"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2325105e16d434749e1be8022f942876a936f9bece4ec41ae244e3d7fae42aaf"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e4371591e621579cb6da8401e4ea405b33ff25a755874a3567c4075ca63d56e2"}, + {file = "multidict-6.2.0-cp39-cp39-win32.whl", hash = "sha256:d1175b0e0d6037fab207f05774a176d71210ebd40b1c51f480a04b65ec5c786d"}, + {file = "multidict-6.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad81012b24b88aad4c70b2cbc2dad84018783221b7f923e926f4690ff8569da3"}, + {file = "multidict-6.2.0-py3-none-any.whl", hash = "sha256:5d26547423e5e71dcc562c4acdc134b900640a39abd9066d7326a7cc2324c530"}, + {file = "multidict-6.2.0.tar.gz", hash = "sha256:0085b0afb2446e57050140240a8595846ed64d1cbd26cef936bfab3192c673b8"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +groups = ["main"] +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +groups = ["main"] +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + +[[package]] +name = "numpy" +version = "2.2.1" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "numpy-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5edb4e4caf751c1518e6a26a83501fda79bff41cc59dac48d70e6d65d4ec4440"}, + {file = "numpy-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa3017c40d513ccac9621a2364f939d39e550c542eb2a894b4c8da92b38896ab"}, + {file = "numpy-2.2.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:61048b4a49b1c93fe13426e04e04fdf5a03f456616f6e98c7576144677598675"}, + {file = "numpy-2.2.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7671dc19c7019103ca44e8d94917eba8534c76133523ca8406822efdd19c9308"}, + {file = "numpy-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4250888bcb96617e00bfa28ac24850a83c9f3a16db471eca2ee1f1714df0f957"}, + {file = "numpy-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7746f235c47abc72b102d3bce9977714c2444bdfaea7888d241b4c4bb6a78bf"}, + {file = "numpy-2.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:059e6a747ae84fce488c3ee397cee7e5f905fd1bda5fb18c66bc41807ff119b2"}, + {file = "numpy-2.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f62aa6ee4eb43b024b0e5a01cf65a0bb078ef8c395e8713c6e8a12a697144528"}, + {file = "numpy-2.2.1-cp310-cp310-win32.whl", hash = "sha256:48fd472630715e1c1c89bf1feab55c29098cb403cc184b4859f9c86d4fcb6a95"}, + {file = "numpy-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:b541032178a718c165a49638d28272b771053f628382d5e9d1c93df23ff58dbf"}, + {file = "numpy-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40f9e544c1c56ba8f1cf7686a8c9b5bb249e665d40d626a23899ba6d5d9e1484"}, + {file = "numpy-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9b57eaa3b0cd8db52049ed0330747b0364e899e8a606a624813452b8203d5f7"}, + {file = "numpy-2.2.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bc8a37ad5b22c08e2dbd27df2b3ef7e5c0864235805b1e718a235bcb200cf1cb"}, + {file = "numpy-2.2.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9036d6365d13b6cbe8f27a0eaf73ddcc070cae584e5ff94bb45e3e9d729feab5"}, + {file = "numpy-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51faf345324db860b515d3f364eaa93d0e0551a88d6218a7d61286554d190d73"}, + {file = "numpy-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38efc1e56b73cc9b182fe55e56e63b044dd26a72128fd2fbd502f75555d92591"}, + {file = "numpy-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:31b89fa67a8042e96715c68e071a1200c4e172f93b0fbe01a14c0ff3ff820fc8"}, + {file = "numpy-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4c86e2a209199ead7ee0af65e1d9992d1dce7e1f63c4b9a616500f93820658d0"}, + {file = "numpy-2.2.1-cp311-cp311-win32.whl", hash = "sha256:b34d87e8a3090ea626003f87f9392b3929a7bbf4104a05b6667348b6bd4bf1cd"}, + {file = "numpy-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:360137f8fb1b753c5cde3ac388597ad680eccbbbb3865ab65efea062c4a1fd16"}, + {file = 
"numpy-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:694f9e921a0c8f252980e85bce61ebbd07ed2b7d4fa72d0e4246f2f8aa6642ab"}, + {file = "numpy-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3683a8d166f2692664262fd4900f207791d005fb088d7fdb973cc8d663626faa"}, + {file = "numpy-2.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:780077d95eafc2ccc3ced969db22377b3864e5b9a0ea5eb347cc93b3ea900315"}, + {file = "numpy-2.2.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:55ba24ebe208344aa7a00e4482f65742969a039c2acfcb910bc6fcd776eb4355"}, + {file = "numpy-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b1d07b53b78bf84a96898c1bc139ad7f10fda7423f5fd158fd0f47ec5e01ac7"}, + {file = "numpy-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5062dc1a4e32a10dc2b8b13cedd58988261416e811c1dc4dbdea4f57eea61b0d"}, + {file = "numpy-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fce4f615f8ca31b2e61aa0eb5865a21e14f5629515c9151850aa936c02a1ee51"}, + {file = "numpy-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:67d4cda6fa6ffa073b08c8372aa5fa767ceb10c9a0587c707505a6d426f4e046"}, + {file = "numpy-2.2.1-cp312-cp312-win32.whl", hash = "sha256:32cb94448be47c500d2c7a95f93e2f21a01f1fd05dd2beea1ccd049bb6001cd2"}, + {file = "numpy-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:ba5511d8f31c033a5fcbda22dd5c813630af98c70b2661f2d2c654ae3cdfcfc8"}, + {file = "numpy-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f1d09e520217618e76396377c81fba6f290d5f926f50c35f3a5f72b01a0da780"}, + {file = "numpy-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3ecc47cd7f6ea0336042be87d9e7da378e5c7e9b3c8ad0f7c966f714fc10d821"}, + {file = "numpy-2.2.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f419290bc8968a46c4933158c91a0012b7a99bb2e465d5ef5293879742f8797e"}, + {file = "numpy-2.2.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = 
"sha256:5b6c390bfaef8c45a260554888966618328d30e72173697e5cabe6b285fb2348"}, + {file = "numpy-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:526fc406ab991a340744aad7e25251dd47a6720a685fa3331e5c59fef5282a59"}, + {file = "numpy-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74e6fdeb9a265624ec3a3918430205dff1df7e95a230779746a6af78bc615af"}, + {file = "numpy-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:53c09385ff0b72ba79d8715683c1168c12e0b6e84fb0372e97553d1ea91efe51"}, + {file = "numpy-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3eac17d9ec51be534685ba877b6ab5edc3ab7ec95c8f163e5d7b39859524716"}, + {file = "numpy-2.2.1-cp313-cp313-win32.whl", hash = "sha256:9ad014faa93dbb52c80d8f4d3dcf855865c876c9660cb9bd7553843dd03a4b1e"}, + {file = "numpy-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:164a829b6aacf79ca47ba4814b130c4020b202522a93d7bff2202bfb33b61c60"}, + {file = "numpy-2.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4dfda918a13cc4f81e9118dea249e192ab167a0bb1966272d5503e39234d694e"}, + {file = "numpy-2.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:733585f9f4b62e9b3528dd1070ec4f52b8acf64215b60a845fa13ebd73cd0712"}, + {file = "numpy-2.2.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:89b16a18e7bba224ce5114db863e7029803c179979e1af6ad6a6b11f70545008"}, + {file = "numpy-2.2.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:676f4eebf6b2d430300f1f4f4c2461685f8269f94c89698d832cdf9277f30b84"}, + {file = "numpy-2.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f5cdf9f493b35f7e41e8368e7d7b4bbafaf9660cba53fb21d2cd174ec09631"}, + {file = "numpy-2.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1ad395cf254c4fbb5b2132fee391f361a6e8c1adbd28f2cd8e79308a615fe9d"}, + {file = "numpy-2.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:08ef779aed40dbc52729d6ffe7dd51df85796a702afbf68a4f4e41fafdc8bda5"}, + {file = "numpy-2.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:26c9c4382b19fcfbbed3238a14abf7ff223890ea1936b8890f058e7ba35e8d71"}, + {file = "numpy-2.2.1-cp313-cp313t-win32.whl", hash = "sha256:93cf4e045bae74c90ca833cba583c14b62cb4ba2cba0abd2b141ab52548247e2"}, + {file = "numpy-2.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:bff7d8ec20f5f42607599f9994770fa65d76edca264a87b5e4ea5629bce12268"}, + {file = "numpy-2.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7ba9cc93a91d86365a5d270dee221fdc04fb68d7478e6bf6af650de78a8339e3"}, + {file = "numpy-2.2.1-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:3d03883435a19794e41f147612a77a8f56d4e52822337844fff3d4040a142964"}, + {file = "numpy-2.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4511d9e6071452b944207c8ce46ad2f897307910b402ea5fa975da32e0102800"}, + {file = "numpy-2.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5c5cc0cbabe9452038ed984d05ac87910f89370b9242371bd9079cb4af61811e"}, + {file = "numpy-2.2.1.tar.gz", hash = "sha256:45681fd7128c8ad1c379f0ca0776a8b0c6583d2f69889ddac01559dfe4390918"}, +] + +[[package]] +name = "openai" +version = "1.76.0" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "openai-1.76.0-py3-none-any.whl", hash = "sha256:a712b50e78cf78e6d7b2a8f69c4978243517c2c36999756673e07a14ce37dc0a"}, + {file = "openai-1.76.0.tar.gz", hash = "sha256:fd2bfaf4608f48102d6b74f9e11c5ecaa058b60dad9c36e409c12477dfd91fb2"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.11,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +realtime = ["websockets (>=13,<16)"] 
+voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"] + +[[package]] +name = "openai-agents" +version = "0.0.13" +description = "OpenAI Agents SDK" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "openai_agents-0.0.13-py3-none-any.whl", hash = "sha256:e11910679e74803e8a4237ce52a21ee6f9ef0848d866e8198f5c4fb8c6310204"}, + {file = "openai_agents-0.0.13.tar.gz", hash = "sha256:6b80315e75c06b5302c5f2adba2f9ea3845f94615daed4706bfb871740f561a5"}, +] + +[package.dependencies] +griffe = ">=1.5.6,<2" +mcp = {version = ">=1.6.0,<2", markers = "python_version >= \"3.10\""} +openai = ">=1.76.0" +pydantic = ">=2.10,<3" +requests = ">=2.0,<3" +types-requests = ">=2.0,<3" +typing-extensions = ">=4.12.2,<5" + +[package.extras] +litellm = ["litellm (>=1.65.0,<2)"] +viz = ["graphviz (>=0.17)"] +voice = ["numpy (>=2.2.0,<3) ; python_version >= \"3.10\"", "websockets (>=15.0,<16)"] + +[[package]] +name = "openpyxl" +version = "3.1.5" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2"}, + {file = "openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050"}, +] + +[package.dependencies] +et-xmlfile = "*" + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "pandas" +version = "2.2.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = 
false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb 
(>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pbs-installer" +version = "2025.3.17" +description = "Installer for Python Build Standalone" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pbs_installer-2025.3.17-py3-none-any.whl", hash = "sha256:d2b0563b1d5d814e479f3c43d7aee019250f68a0a113d754714fa9a721f83b47"}, + {file = "pbs_installer-2025.3.17.tar.gz", hash = "sha256:dde058f925b989c1d3bd90739c16ffd0e68732f7716e4d1e01ca480d00a67560"}, +] + +[package.dependencies] +httpx = {version = ">=0.27.0,<1", optional = true, markers = "extra == \"download\""} +zstandard = {version = ">=0.21.0", optional = true, markers = "extra == \"install\""} + +[package.extras] +all = ["pbs-installer[download,install]"] +download = ["httpx (>=0.27.0,<1)"] +install = ["zstandard (>=0.21.0)"] + +[[package]] +name = "pkginfo" +version = "1.12.1.2" +description = "Query metadata from sdists / bdists / installed packages." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pkginfo-1.12.1.2-py3-none-any.whl", hash = "sha256:c783ac885519cab2c34927ccfa6bf64b5a704d7c69afaea583dd9b7afe969343"}, + {file = "pkginfo-1.12.1.2.tar.gz", hash = "sha256:5cd957824ac36f140260964eba3c6be6442a8359b8c48f4adf90210f33a04b7b"}, +] + +[package.extras] +testing = ["pytest", "pytest-cov", "wheel"] + +[[package]] +name = "platformdirs" +version = "4.3.7" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, + {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "poetry" +version = "2.1.1" +description = "Python dependency management and packaging made easy." 
+optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "poetry-2.1.1-py3-none-any.whl", hash = "sha256:1d433880bd5b401327ddee789ccfe9ff197bf3b0cd240f0bc7cc99c84d14b16c"}, + {file = "poetry-2.1.1.tar.gz", hash = "sha256:d82673865bf13d6cd0dacf28c69a89670456d8df2f9e5da82bfb5f833ba00efc"}, +] + +[package.dependencies] +build = ">=1.2.1,<2.0.0" +cachecontrol = {version = ">=0.14.0,<0.15.0", extras = ["filecache"]} +cleo = ">=2.1.0,<3.0.0" +dulwich = ">=0.22.6,<0.23.0" +fastjsonschema = ">=2.18.0,<3.0.0" +findpython = ">=0.6.2,<0.7.0" +installer = ">=0.7.0,<0.8.0" +keyring = ">=25.1.0,<26.0.0" +packaging = ">=24.0" +pbs-installer = {version = ">=2025.1.6,<2026.0.0", extras = ["download", "install"]} +pkginfo = ">=1.12,<2.0" +platformdirs = ">=3.0.0,<5" +poetry-core = "2.1.1" +pyproject-hooks = ">=1.0.0,<2.0.0" +requests = ">=2.26,<3.0" +requests-toolbelt = ">=1.0.0,<2.0.0" +shellingham = ">=1.5,<2.0" +tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} +tomlkit = ">=0.11.4,<1.0.0" +trove-classifiers = ">=2022.5.19" +virtualenv = ">=20.26.6,<21.0.0" +xattr = {version = ">=1.0.0,<2.0.0", markers = "sys_platform == \"darwin\""} + +[[package]] +name = "poetry-core" +version = "2.1.1" +description = "Poetry PEP 517 Build Backend" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "poetry_core-2.1.1-py3-none-any.whl", hash = "sha256:bc3b0382ab4d00d5d780277fd0aad1580eb4403613b37fc60fec407b5bee1fe6"}, + {file = "poetry_core-2.1.1.tar.gz", hash = "sha256:c1a1f6f00e4254742f40988a8caf665549101cf9991122cd5de1198897768b1a"}, +] + +[[package]] +name = "portalocker" +version = "2.10.1" +description = "Wraps the portalocker recipe for easy usage" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, + {file = "portalocker-2.10.1.tar.gz", 
hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, +] + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[package.extras] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] + +[[package]] +name = "priority" +version = "2.0.0" +description = "A pure-Python implementation of the HTTP/2 priority tree" +optional = false +python-versions = ">=3.6.1" +groups = ["main"] +files = [ + {file = "priority-2.0.0-py3-none-any.whl", hash = "sha256:6f8eefce5f3ad59baf2c080a664037bb4725cd0a790d53d59ab4059288faf6aa"}, + {file = "priority-2.0.0.tar.gz", hash = "sha256:c965d54f1b8d0d0b19479db3924c7c36cf672dbf2aec92d43fbdaf4492ba18c0"}, +] + +[[package]] +name = "propcache" +version = "0.3.0" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d"}, + {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c"}, + {file = "propcache-0.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3e7420211f5a65a54675fd860ea04173cde60a7cc20ccfbafcccd155225f8bc"}, + {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3302c5287e504d23bb0e64d2a921d1eb4a03fb93a0a0aa3b53de059f5a5d737d"}, + {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e2e068a83552ddf7a39a99488bcba05ac13454fb205c847674da0352602082f"}, + {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d913d36bdaf368637b4f88d554fb9cb9d53d6920b9c5563846555938d5450bf"}, + {file 
= "propcache-0.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ee1983728964d6070ab443399c476de93d5d741f71e8f6e7880a065f878e0b9"}, + {file = "propcache-0.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36ca5e9a21822cc1746023e88f5c0af6fce3af3b85d4520efb1ce4221bed75cc"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9ecde3671e62eeb99e977f5221abcf40c208f69b5eb986b061ccec317c82ebd0"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d383bf5e045d7f9d239b38e6acadd7b7fdf6c0087259a84ae3475d18e9a2ae8b"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8cb625bcb5add899cb8ba7bf716ec1d3e8f7cdea9b0713fa99eadf73b6d4986f"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5fa159dcee5dba00c1def3231c249cf261185189205073bde13797e57dd7540a"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7080b0159ce05f179cfac592cda1a82898ca9cd097dacf8ea20ae33474fbb25"}, + {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ed7161bccab7696a473fe7ddb619c1d75963732b37da4618ba12e60899fefe4f"}, + {file = "propcache-0.3.0-cp310-cp310-win32.whl", hash = "sha256:bf0d9a171908f32d54f651648c7290397b8792f4303821c42a74e7805bfb813c"}, + {file = "propcache-0.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:42924dc0c9d73e49908e35bbdec87adedd651ea24c53c29cac103ede0ea1d340"}, + {file = "propcache-0.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9ddd49258610499aab83b4f5b61b32e11fce873586282a0e972e5ab3bcadee51"}, + {file = "propcache-0.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2578541776769b500bada3f8a4eeaf944530516b6e90c089aa368266ed70c49e"}, + {file = "propcache-0.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8074c5dd61c8a3e915fa8fc04754fa55cfa5978200d2daa1e2d4294c1f136aa"}, + {file = 
"propcache-0.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b58229a844931bca61b3a20efd2be2a2acb4ad1622fc026504309a6883686fbf"}, + {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e45377d5d6fefe1677da2a2c07b024a6dac782088e37c0b1efea4cfe2b1be19b"}, + {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ec5060592d83454e8063e487696ac3783cc48c9a329498bafae0d972bc7816c9"}, + {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15010f29fbed80e711db272909a074dc79858c6d28e2915704cfc487a8ac89c6"}, + {file = "propcache-0.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a254537b9b696ede293bfdbc0a65200e8e4507bc9f37831e2a0318a9b333c85c"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2b975528998de037dfbc10144b8aed9b8dd5a99ec547f14d1cb7c5665a43f075"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:19d36bb351ad5554ff20f2ae75f88ce205b0748c38b146c75628577020351e3c"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6032231d4a5abd67c7f71168fd64a47b6b451fbcb91c8397c2f7610e67683810"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6985a593417cdbc94c7f9c3403747335e450c1599da1647a5af76539672464d3"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6a1948df1bb1d56b5e7b0553c0fa04fd0e320997ae99689488201f19fa90d2e7"}, + {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8319293e85feadbbfe2150a5659dbc2ebc4afdeaf7d98936fb9a2f2ba0d4c35c"}, + {file = "propcache-0.3.0-cp311-cp311-win32.whl", hash = "sha256:63f26258a163c34542c24808f03d734b338da66ba91f410a703e505c8485791d"}, + {file = "propcache-0.3.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:cacea77ef7a2195f04f9279297684955e3d1ae4241092ff0cfcef532bb7a1c32"}, + {file = "propcache-0.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e"}, + {file = "propcache-0.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af"}, + {file = "propcache-0.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5"}, + {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b"}, + {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667"}, + {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7"}, + {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7"}, + {file = "propcache-0.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64"}, + {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c"}, + {file = "propcache-0.3.0-cp312-cp312-win32.whl", hash = "sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d"}, + {file = "propcache-0.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57"}, + {file = "propcache-0.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a2b9bf8c79b660d0ca1ad95e587818c30ccdb11f787657458d6f26a1ea18c568"}, + {file = "propcache-0.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b0c1a133d42c6fc1f5fbcf5c91331657a1ff822e87989bf4a6e2e39b818d0ee9"}, + {file = "propcache-0.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bb2f144c6d98bb5cbc94adeb0447cfd4c0f991341baa68eee3f3b0c9c0e83767"}, + {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1323cd04d6e92150bcc79d0174ce347ed4b349d748b9358fd2e497b121e03c8"}, + {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b812b3cb6caacd072276ac0492d249f210006c57726b6484a1e1805b3cfeea0"}, + {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:742840d1d0438eb7ea4280f3347598f507a199a35a08294afdcc560c3739989d"}, + {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6e7e4f9167fddc438cd653d826f2222222564daed4116a02a184b464d3ef05"}, + {file = "propcache-0.3.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a94ffc66738da99232ddffcf7910e0f69e2bbe3a0802e54426dbf0714e1c2ffe"}, + {file = 
"propcache-0.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c6ec957025bf32b15cbc6b67afe233c65b30005e4c55fe5768e4bb518d712f1"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:549722908de62aa0b47a78b90531c022fa6e139f9166be634f667ff45632cc92"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5d62c4f6706bff5d8a52fd51fec6069bef69e7202ed481486c0bc3874912c787"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:24c04f8fbf60094c531667b8207acbae54146661657a1b1be6d3ca7773b7a545"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7c5f5290799a3f6539cc5e6f474c3e5c5fbeba74a5e1e5be75587746a940d51e"}, + {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4fa0e7c9c3cf7c276d4f6ab9af8adddc127d04e0fcabede315904d2ff76db626"}, + {file = "propcache-0.3.0-cp313-cp313-win32.whl", hash = "sha256:ee0bd3a7b2e184e88d25c9baa6a9dc609ba25b76daae942edfb14499ac7ec374"}, + {file = "propcache-0.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:1c8f7d896a16da9455f882870a507567d4f58c53504dc2d4b1e1d386dfe4588a"}, + {file = "propcache-0.3.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e560fd75aaf3e5693b91bcaddd8b314f4d57e99aef8a6c6dc692f935cc1e6bbf"}, + {file = "propcache-0.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:65a37714b8ad9aba5780325228598a5b16c47ba0f8aeb3dc0514701e4413d7c0"}, + {file = "propcache-0.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:07700939b2cbd67bfb3b76a12e1412405d71019df00ca5697ce75e5ef789d829"}, + {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c0fdbdf6983526e269e5a8d53b7ae3622dd6998468821d660d0daf72779aefa"}, + {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:794c3dd744fad478b6232289c866c25406ecdfc47e294618bdf1697e69bd64a6"}, + {file = 
"propcache-0.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4544699674faf66fb6b4473a1518ae4999c1b614f0b8297b1cef96bac25381db"}, + {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddb8870bdb83456a489ab67c6b3040a8d5a55069aa6f72f9d872235fbc52f54"}, + {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f857034dc68d5ceb30fb60afb6ff2103087aea10a01b613985610e007053a121"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02df07041e0820cacc8f739510078f2aadcfd3fc57eaeeb16d5ded85c872c89e"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f47d52fd9b2ac418c4890aad2f6d21a6b96183c98021f0a48497a904199f006e"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9ff4e9ecb6e4b363430edf2c6e50173a63e0820e549918adef70515f87ced19a"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ecc2920630283e0783c22e2ac94427f8cca29a04cfdf331467d4f661f4072dac"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:c441c841e82c5ba7a85ad25986014be8d7849c3cfbdb6004541873505929a74e"}, + {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c929916cbdb540d3407c66f19f73387f43e7c12fa318a66f64ac99da601bcdf"}, + {file = "propcache-0.3.0-cp313-cp313t-win32.whl", hash = "sha256:0c3e893c4464ebd751b44ae76c12c5f5c1e4f6cbd6fbf67e3783cd93ad221863"}, + {file = "propcache-0.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:75e872573220d1ee2305b35c9813626e620768248425f58798413e9c39741f46"}, + {file = "propcache-0.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:03c091bb752349402f23ee43bb2bff6bd80ccab7c9df6b88ad4322258d6960fc"}, + {file = "propcache-0.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:46ed02532cb66612d42ae5c3929b5e98ae330ea0f3900bc66ec5f4862069519b"}, + {file = "propcache-0.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11ae6a8a01b8a4dc79093b5d3ca2c8a4436f5ee251a9840d7790dccbd96cb649"}, + {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df03cd88f95b1b99052b52b1bb92173229d7a674df0ab06d2b25765ee8404bce"}, + {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03acd9ff19021bd0567582ac88f821b66883e158274183b9e5586f678984f8fe"}, + {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd54895e4ae7d32f1e3dd91261df46ee7483a735017dc6f987904f194aa5fd14"}, + {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a67e5c04e3119594d8cfae517f4b9330c395df07ea65eab16f3d559b7068fe"}, + {file = "propcache-0.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee25f1ac091def37c4b59d192bbe3a206298feeb89132a470325bf76ad122a1e"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58e6d2a5a7cb3e5f166fd58e71e9a4ff504be9dc61b88167e75f835da5764d07"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:be90c94570840939fecedf99fa72839aed70b0ced449b415c85e01ae67422c90"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:49ea05212a529c2caffe411e25a59308b07d6e10bf2505d77da72891f9a05641"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:119e244ab40f70a98c91906d4c1f4c5f2e68bd0b14e7ab0a06922038fae8a20f"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:507c5357a8d8b4593b97fb669c50598f4e6cccbbf77e22fa9598aba78292b4d7"}, + {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8526b0941ec5a40220fc4dfde76aed58808e2b309c03e9fa8e2260083ef7157f"}, + {file = 
"propcache-0.3.0-cp39-cp39-win32.whl", hash = "sha256:7cedd25e5f678f7738da38037435b340694ab34d424938041aa630d8bac42663"}, + {file = "propcache-0.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:bf4298f366ca7e1ad1d21bbb58300a6985015909964077afd37559084590c929"}, + {file = "propcache-0.3.0-py3-none-any.whl", hash = "sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043"}, + {file = "propcache-0.3.0.tar.gz", hash = "sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5"}, +] + +[[package]] +name = "protobuf" +version = "5.29.4" +description = "" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"}, + {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"}, + {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"}, + {file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = "sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"}, + {file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"}, + {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"}, + {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"}, + {file = "protobuf-5.29.4-py3-none-any.whl", hash = 
"sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"}, + {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.10.5" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"}, + {file = "pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, 
+ {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", 
hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file 
= "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.8.1" +description = "Settings 
management using Pydantic" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c"}, + {file = "pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585"}, +] + +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" + +[package.extras] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pyjwt" +version = "2.10.1" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pymongo" +version = "4.10.1" +description = "Python driver for MongoDB " +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pymongo-4.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e699aa68c4a7dea2ab5a27067f7d3e08555f8d2c0dc6a0c8c60cfd9ff2e6a4b1"}, + {file = "pymongo-4.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:70645abc714f06b4ad6b72d5bf73792eaad14e3a2cfe29c62a9c81ada69d9e4b"}, + {file = "pymongo-4.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae2fd94c9fe048c94838badcc6e992d033cb9473eb31e5710b3707cba5e8aee2"}, 
+ {file = "pymongo-4.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ded27a4a5374dae03a92e084a60cdbcecd595306555bda553b833baf3fc4868"}, + {file = "pymongo-4.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ecc2455e3974a6c429687b395a0bc59636f2d6aedf5785098cf4e1f180f1c71"}, + {file = "pymongo-4.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920fee41f7d0259f5f72c1f1eb331bc26ffbdc952846f9bd8c3b119013bb52c"}, + {file = "pymongo-4.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0a15665b2d6cf364f4cd114d62452ce01d71abfbd9c564ba8c74dcd7bbd6822"}, + {file = "pymongo-4.10.1-cp310-cp310-win32.whl", hash = "sha256:29e1c323c28a4584b7095378ff046815e39ff82cdb8dc4cc6dfe3acf6f9ad1f8"}, + {file = "pymongo-4.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:88dc4aa45f8744ccfb45164aedb9a4179c93567bbd98a33109d7dc400b00eb08"}, + {file = "pymongo-4.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:57ee6becae534e6d47848c97f6a6dff69e3cce7c70648d6049bd586764febe59"}, + {file = "pymongo-4.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f437a612f4d4f7aca1812311b1e84477145e950fdafe3285b687ab8c52541f3"}, + {file = "pymongo-4.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a970fd3117ab40a4001c3dad333bbf3c43687d90f35287a6237149b5ccae61d"}, + {file = "pymongo-4.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7c4d0e7cd08ef9f8fbf2d15ba281ed55604368a32752e476250724c3ce36c72e"}, + {file = "pymongo-4.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca6f700cff6833de4872a4e738f43123db34400173558b558ae079b5535857a4"}, + {file = "pymongo-4.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cec237c305fcbeef75c0bcbe9d223d1e22a6e3ba1b53b2f0b79d3d29c742b45b"}, + {file = 
"pymongo-4.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3337804ea0394a06e916add4e5fac1c89902f1b6f33936074a12505cab4ff05"}, + {file = "pymongo-4.10.1-cp311-cp311-win32.whl", hash = "sha256:778ac646ce6ac1e469664062dfe9ae1f5c9961f7790682809f5ec3b8fda29d65"}, + {file = "pymongo-4.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:9df4ab5594fdd208dcba81be815fa8a8a5d8dedaf3b346cbf8b61c7296246a7a"}, + {file = "pymongo-4.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fbedc4617faa0edf423621bb0b3b8707836687161210d470e69a4184be9ca011"}, + {file = "pymongo-4.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7bd26b2aec8ceeb95a5d948d5cc0f62b0eb6d66f3f4230705c1e3d3d2c04ec76"}, + {file = "pymongo-4.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb104c3c2a78d9d85571c8ac90ec4f95bca9b297c6eee5ada71fabf1129e1674"}, + {file = "pymongo-4.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4924355245a9c79f77b5cda2db36e0f75ece5faf9f84d16014c0a297f6d66786"}, + {file = "pymongo-4.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11280809e5dacaef4971113f0b4ff4696ee94cfdb720019ff4fa4f9635138252"}, + {file = "pymongo-4.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5d55f2a82e5eb23795f724991cac2bffbb1c0f219c0ba3bf73a835f97f1bb2e"}, + {file = "pymongo-4.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e974ab16a60be71a8dfad4e5afccf8dd05d41c758060f5d5bda9a758605d9a5d"}, + {file = "pymongo-4.10.1-cp312-cp312-win32.whl", hash = "sha256:544890085d9641f271d4f7a47684450ed4a7344d6b72d5968bfae32203b1bb7c"}, + {file = "pymongo-4.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:dcc07b1277e8b4bf4d7382ca133850e323b7ab048b8353af496d050671c7ac52"}, + {file = "pymongo-4.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:90bc6912948dfc8c363f4ead54d54a02a15a7fee6cfafb36dc450fc8962d2cb7"}, + {file = "pymongo-4.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:594dd721b81f301f33e843453638e02d92f63c198358e5a0fa8b8d0b1218dabc"}, + {file = "pymongo-4.10.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0783e0c8e95397c84e9cf8ab092ab1e5dd7c769aec0ef3a5838ae7173b98dea0"}, + {file = "pymongo-4.10.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fb6a72e88df46d1c1040fd32cd2d2c5e58722e5d3e31060a0393f04ad3283de"}, + {file = "pymongo-4.10.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e3a593333e20c87415420a4fb76c00b7aae49b6361d2e2205b6fece0563bf40"}, + {file = "pymongo-4.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72e2ace7456167c71cfeca7dcb47bd5dceda7db2231265b80fc625c5e8073186"}, + {file = "pymongo-4.10.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ad05eb9c97e4f589ed9e74a00fcaac0d443ccd14f38d1258eb4c39a35dd722b"}, + {file = "pymongo-4.10.1-cp313-cp313-win32.whl", hash = "sha256:ee4c86d8e6872a61f7888fc96577b0ea165eb3bdb0d841962b444fa36001e2bb"}, + {file = "pymongo-4.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:45ee87a4e12337353242bc758accc7fb47a2f2d9ecc0382a61e64c8f01e86708"}, + {file = "pymongo-4.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:442ca247f53ad24870a01e80a71cd81b3f2318655fd9d66748ee2bd1b1569d9e"}, + {file = "pymongo-4.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:23e1d62df5592518204943b507be7b457fb8a4ad95a349440406fd42db5d0923"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6131bc6568b26e7495a9f3ef2b1700566b76bbecd919f4472bfe90038a61f425"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:fdeba88c540c9ed0338c0b2062d9f81af42b18d6646b3e6dda05cf6edd46ada9"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15a624d752dd3c89d10deb0ef6431559b6d074703cab90a70bb849ece02adc6b"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba164e73fdade9b4614a2497321c5b7512ddf749ed508950bdecc28d8d76a2d9"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9235fa319993405ae5505bf1333366388add2e06848db7b3deee8f990b69808e"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4a65567bd17d19f03157c7ec992c6530eafd8191a4e5ede25566792c4fe3fa2"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f1945d48fb9b8a87d515da07f37e5b2c35b364a435f534c122e92747881f4a7c"}, + {file = "pymongo-4.10.1-cp38-cp38-win32.whl", hash = "sha256:345f8d340802ebce509f49d5833cc913da40c82f2e0daf9f60149cacc9ca680f"}, + {file = "pymongo-4.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:3a70d5efdc0387ac8cd50f9a5f379648ecfc322d14ec9e1ba8ec957e5d08c372"}, + {file = "pymongo-4.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15b1492cc5c7cd260229590be7218261e81684b8da6d6de2660cf743445500ce"}, + {file = "pymongo-4.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95207503c41b97e7ecc7e596d84a61f441b4935f11aa8332828a754e7ada8c82"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb99f003c720c6d83be02c8f1a7787c22384a8ca9a4181e406174db47a048619"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2bc1ee4b1ca2c4e7e6b7a5e892126335ec8d9215bcd3ac2fe075870fefc3358"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93a0833c10a967effcd823b4e7445ec491f0bf6da5de0ca33629c0528f42b748"}, + {file 
= "pymongo-4.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f56707497323150bd2ed5d63067f4ffce940d0549d4ea2dfae180deec7f9363"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:409ab7d6c4223e5c85881697f365239dd3ed1b58f28e4124b846d9d488c86880"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dac78a650dc0637d610905fd06b5fa6419ae9028cf4d04d6a2657bc18a66bbce"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1ec3fa88b541e0481aff3c35194c9fac96e4d57ec5d1c122376000eb28c01431"}, + {file = "pymongo-4.10.1-cp39-cp39-win32.whl", hash = "sha256:e0e961923a7b8a1c801c43552dcb8153e45afa41749d9efbd3a6d33f45489f7a"}, + {file = "pymongo-4.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:dabe8bf1ad644e6b93f3acf90ff18536d94538ca4d27e583c6db49889e98e48f"}, + {file = "pymongo-4.10.1.tar.gz", hash = "sha256:a9de02be53b6bb98efe0b9eda84ffa1ec027fcb23a2de62c4f941d9a2f2f3330"}, +] + +[package.dependencies] +dnspython = ">=1.16.0,<3.0.0" + +[package.extras] +aws = ["pymongo-auth-aws (>=1.1.0,<2.0.0)"] +docs = ["furo (==2023.9.10)", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<8)", "sphinx-autobuild (>=2020.9.1)", "sphinx-rtd-theme (>=2,<3)", "sphinxcontrib-shellcheck (>=1,<2)"] +encryption = ["certifi ; os_name == \"nt\" or sys_platform == \"darwin\"", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.10.0,<2.0.0)"] +gssapi = ["pykerberos ; os_name != \"nt\"", "winkerberos (>=0.5.0) ; os_name == \"nt\""] +ocsp = ["certifi ; os_name == \"nt\" or sys_platform == \"darwin\"", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +snappy = ["python-snappy"] +test = ["pytest (>=8.2)", "pytest-asyncio (>=0.24.0)"] +zstd = ["zstandard"] + +[[package]] +name = "pyproject-hooks" +version = "1.2.0" +description = "Wrappers to call 
pyproject.toml-based build backend hooks." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, + {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-docx" +version = "1.1.2" +description = "Create, read, and update Microsoft Word .docx files." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "python_docx-1.1.2-py3-none-any.whl", hash = "sha256:08c20d6058916fb19853fcf080f7f42b6270d89eac9fa5f8c15f691c0017fabe"}, + {file = "python_docx-1.1.2.tar.gz", hash = "sha256:0cf1f22e95b9002addca7948e16f2cd7acdfd498047f1941ca5d293db7762efd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = ">=4.9.0" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + +[[package]] +name = "pywin32" +version = "310" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1"}, + {file = "pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d"}, + {file = "pywin32-310-cp310-cp310-win_arm64.whl", hash = "sha256:33babed0cf0c92a6f94cc6cc13546ab24ee13e3e800e61ed87609ab91e4c8213"}, + {file = 
"pywin32-310-cp311-cp311-win32.whl", hash = "sha256:1e765f9564e83011a63321bb9d27ec456a0ed90d3732c4b2e312b855365ed8bd"}, + {file = "pywin32-310-cp311-cp311-win_amd64.whl", hash = "sha256:126298077a9d7c95c53823934f000599f66ec9296b09167810eb24875f32689c"}, + {file = "pywin32-310-cp311-cp311-win_arm64.whl", hash = "sha256:19ec5fc9b1d51c4350be7bb00760ffce46e6c95eaf2f0b2f1150657b1a43c582"}, + {file = "pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d"}, + {file = "pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060"}, + {file = "pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966"}, + {file = "pywin32-310-cp313-cp313-win32.whl", hash = "sha256:5d241a659c496ada3253cd01cfaa779b048e90ce4b2b38cd44168ad555ce74ab"}, + {file = "pywin32-310-cp313-cp313-win_amd64.whl", hash = "sha256:667827eb3a90208ddbdcc9e860c81bde63a135710e21e4cb3348968e4bd5249e"}, + {file = "pywin32-310-cp313-cp313-win_arm64.whl", hash = "sha256:e308f831de771482b7cf692a1f308f8fca701b2d8f9dde6cc440c7da17e47b33"}, + {file = "pywin32-310-cp38-cp38-win32.whl", hash = "sha256:0867beb8addefa2e3979d4084352e4ac6e991ca45373390775f7084cc0209b9c"}, + {file = "pywin32-310-cp38-cp38-win_amd64.whl", hash = "sha256:30f0a9b3138fb5e07eb4973b7077e1883f558e40c578c6925acc7a94c34eaa36"}, + {file = "pywin32-310-cp39-cp39-win32.whl", hash = "sha256:851c8d927af0d879221e616ae1f66145253537bbdd321a77e8ef701b443a9a1a"}, + {file = "pywin32-310-cp39-cp39-win_amd64.whl", hash = "sha256:96867217335559ac619f00ad70e513c0fcf84b8a3af9fc2bba3b59b97da70475"}, +] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.3" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +optional = false +python-versions = ">=3.6" +groups = ["main"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "pywin32-ctypes-0.2.3.tar.gz", 
hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"}, + {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"}, +] + +[[package]] +name = "qdrant-client" +version = "1.13.3" +description = "Client library for the Qdrant vector search engine" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "qdrant_client-1.13.3-py3-none-any.whl", hash = "sha256:f52cacbb936e547d3fceb1aaed3e3c56be0ebfd48e8ea495ea3dbc89c671d1d2"}, + {file = "qdrant_client-1.13.3.tar.gz", hash = "sha256:61ca09e07c6d7ac0dfbdeb13dca4fe5f3e08fa430cb0d74d66ef5d023a70adfc"}, +] + +[package.dependencies] +grpcio = ">=1.41.0" +grpcio-tools = ">=1.41.0" +httpx = {version = ">=0.20.0", extras = ["http2"]} +numpy = [ + {version = ">=1.21", markers = "python_version >= \"3.10\" and python_version < \"3.12\""}, + {version = ">=1.26", markers = "python_version == \"3.12\""}, + {version = ">=2.1.0", markers = "python_version >= \"3.13\""}, +] +portalocker = ">=2.7.0,<3.0.0" +pydantic = ">=1.10.8" +urllib3 = ">=1.26.14,<3" + +[package.extras] +fastembed = ["fastembed (==0.5.1)"] +fastembed-gpu = ["fastembed-gpu (==0.5.1)"] + +[[package]] +name = "quart" +version = "0.20.0" +description = "A Python ASGI web framework with the same API as Flask" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "quart-0.20.0-py3-none-any.whl", hash = "sha256:003c08f551746710acb757de49d9b768986fd431517d0eb127380b656b98b8f1"}, + {file = "quart-0.20.0.tar.gz", hash = "sha256:08793c206ff832483586f5ae47018c7e40bdd75d886fee3fabbdaa70c2cf505d"}, +] + +[package.dependencies] +aiofiles = "*" +blinker = ">=1.6" +click = ">=8.0" +flask = ">=3.0" +hypercorn = ">=0.11.2" +itsdangerous = "*" +jinja2 = "*" +markupsafe = "*" +werkzeug = ">=3.0" + +[package.extras] +dotenv = ["python-dotenv"] + +[[package]] +name = "rapidfuzz" +version = "3.12.2" +description = "rapid fuzzy 
string matching" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "rapidfuzz-3.12.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b9a75e0385a861178adf59e86d6616cbd0d5adca7228dc9eeabf6f62cf5b0b1"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6906a7eb458731e3dd2495af1d0410e23a21a2a2b7ced535e6d5cd15cb69afc5"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4b3334a8958b689f292d5ce8a928140ac98919b51e084f04bf0c14276e4c6ba"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:85a54ce30345cff2c79cbcffa063f270ad1daedd0d0c3ff6e541d3c3ba4288cf"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acb63c5072c08058f8995404201a52fc4e1ecac105548a4d03c6c6934bda45a3"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5385398d390c6571f0f2a7837e6ddde0c8b912dac096dc8c87208ce9aaaa7570"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5032cbffa245b4beba0067f8ed17392ef2501b346ae3c1f1d14b950edf4b6115"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:195adbb384d89d6c55e2fd71e7fb262010f3196e459aa2f3f45f31dd7185fe72"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f43b773a4d4950606fb25568ecde5f25280daf8f97b87eb323e16ecd8177b328"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:55a43be0e0fa956a919043c19d19bd988991d15c59f179d413fe5145ed9deb43"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:71cf1ea16acdebe9e2fb62ee7a77f8f70e877bebcbb33b34e660af2eb6d341d9"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a3692d4ab36d44685f61326dca539975a4eda49b2a76f0a3df177d8a2c0de9d2"}, + 
{file = "rapidfuzz-3.12.2-cp310-cp310-win32.whl", hash = "sha256:09227bd402caa4397ba1d6e239deea635703b042dd266a4092548661fb22b9c6"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-win_amd64.whl", hash = "sha256:0f05b7b95f9f87254b53fa92048367a8232c26cee7fc8665e4337268c3919def"}, + {file = "rapidfuzz-3.12.2-cp310-cp310-win_arm64.whl", hash = "sha256:6938738e00d9eb6e04097b3f565097e20b0c398f9c58959a2bc64f7f6be3d9da"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e9c4d984621ae17404c58f8d06ed8b025e167e52c0e6a511dfec83c37e9220cd"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f9132c55d330f0a1d34ce6730a76805323a6250d97468a1ca766a883d6a9a25"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b343b6cb4b2c3dbc8d2d4c5ee915b6088e3b144ddf8305a57eaab16cf9fc74"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24081077b571ec4ee6d5d7ea0e49bc6830bf05b50c1005028523b9cd356209f3"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c988a4fc91856260355773bf9d32bebab2083d4c6df33fafeddf4330e5ae9139"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:780b4469ee21cf62b1b2e8ada042941fd2525e45d5fb6a6901a9798a0e41153c"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edd84b0a323885493c893bad16098c5e3b3005d7caa995ae653da07373665d97"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efa22059c765b3d8778083805b199deaaf643db070f65426f87d274565ddf36a"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:095776b11bb45daf7c2973dd61cc472d7ea7f2eecfa454aef940b4675659b92f"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:7e2574cf4aa86065600b664a1ac7b8b8499107d102ecde836aaaa403fc4f1784"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d5a3425a6c50fd8fbd991d8f085ddb504791dae6ef9cc3ab299fea2cb5374bef"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:97fb05e1ddb7b71a054040af588b0634214ee87cea87900d309fafc16fd272a4"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-win32.whl", hash = "sha256:b4c5a0413589aef936892fbfa94b7ff6f7dd09edf19b5a7b83896cc9d4e8c184"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:58d9ae5cf9246d102db2a2558b67fe7e73c533e5d769099747921232d88b9be2"}, + {file = "rapidfuzz-3.12.2-cp311-cp311-win_arm64.whl", hash = "sha256:7635fe34246cd241c8e35eb83084e978b01b83d5ef7e5bf72a704c637f270017"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1d982a651253ffe8434d9934ff0c1089111d60502228464721a2a4587435e159"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02e6466caa0222d5233b1f05640873671cd99549a5c5ba4c29151634a1e56080"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e956b3f053e474abae69ac693a52742109d860ac2375fe88e9387d3277f4c96c"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dee7d740a2d5418d4f964f39ab8d89923e6b945850db833e798a1969b19542a"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a057cdb0401e42c84b6516c9b1635f7aedd5e430c6e388bd5f6bcd1d6a0686bb"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dccf8d4fb5b86d39c581a59463c596b1d09df976da26ff04ae219604223d502f"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21d5b3793c6f5aecca595cd24164bf9d3c559e315ec684f912146fc4e769e367"}, + {file = 
"rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:46a616c0e13cff2de1761b011e0b14bb73b110182f009223f1453d505c9a975c"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19fa5bc4301a1ee55400d4a38a8ecf9522b0391fc31e6da5f4d68513fe5c0026"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:544a47190a0d25971658a9365dba7095397b4ce3e897f7dd0a77ca2cf6fa984e"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f21af27c5e001f0ba1b88c36a0936437dfe034c452548d998891c21125eb640f"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b63170d9db00629b5b3f2862114d8d6ee19127eaba0eee43762d62a25817dbe0"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-win32.whl", hash = "sha256:6c7152d77b2eb6bfac7baa11f2a9c45fd5a2d848dbb310acd0953b3b789d95c9"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:1a314d170ee272ac87579f25a6cf8d16a031e1f7a7b07663434b41a1473bc501"}, + {file = "rapidfuzz-3.12.2-cp312-cp312-win_arm64.whl", hash = "sha256:d41e8231326e94fd07c4d8f424f6bed08fead6f5e6688d1e6e787f1443ae7631"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:941f31038dba5d3dedcfcceba81d61570ad457c873a24ceb13f4f44fcb574260"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fe2dfc454ee51ba168a67b1e92b72aad251e45a074972cef13340bbad2fd9438"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78fafaf7f5a48ee35ccd7928339080a0136e27cf97396de45259eca1d331b714"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0c7989ff32c077bb8fd53253fd6ca569d1bfebc80b17557e60750e6909ba4fe"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96fa00bc105caa34b6cd93dca14a29243a3a7f0c336e4dcd36348d38511e15ac"}, + {file = 
"rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bccfb30c668620c5bc3490f2dc7d7da1cca0ead5a9da8b755e2e02e2ef0dff14"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f9b0adc3d894beb51f5022f64717b6114a6fabaca83d77e93ac7675911c8cc5"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32691aa59577f42864d5535cb6225d0f47e2c7bff59cf4556e5171e96af68cc1"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:758b10380ad34c1f51753a070d7bb278001b5e6fcf544121c6df93170952d705"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:50a9c54c0147b468363119132d514c5024fbad1ed8af12bd8bd411b0119f9208"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e3ceb87c11d2d0fbe8559bb795b0c0604b84cfc8bb7b8720b5c16e9e31e00f41"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f7c9a003002434889255ff5676ca0f8934a478065ab5e702f75dc42639505bba"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-win32.whl", hash = "sha256:cf165a76870cd875567941cf861dfd361a0a6e6a56b936c5d30042ddc9def090"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-win_amd64.whl", hash = "sha256:55bcc003541f5f16ec0a73bf6de758161973f9e8d75161954380738dd147f9f2"}, + {file = "rapidfuzz-3.12.2-cp313-cp313-win_arm64.whl", hash = "sha256:69f6ecdf1452139f2b947d0c169a605de578efdb72cbb2373cb0a94edca1fd34"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c852cd8bed1516a64fd6e2d4c6f270d4356196ee03fda2af1e5a9e13c34643"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42e7f747b55529a6d0d1588695d71025e884ab48664dca54b840413dea4588d8"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a749fd2690f24ef256b264a781487746bbb95344364fe8fe356f0eef7ef206ba"}, + {file = 
"rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a11e1d036170bbafa43a9e63d8c309273564ec5bdfc5439062f439d1a16965a"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfb337f1832c1231e3d5621bd0ebebb854e46036aedae3e6a49c1fc08f16f249"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e88c6e68fca301722fa3ab7fd3ca46998012c14ada577bc1e2c2fc04f2067ca6"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17e1a3a8b4b5125cfb63a6990459b25b87ea769bdaf90d05bb143f8febef076a"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9f8177b24ccc0a843e85932b1088c5e467a7dd7a181c13f84c684b796bea815"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6c506bdc2f304051592c0d3b0e82eed309248ec10cdf802f13220251358375ea"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:30bf15c1ecec2798b713d551df17f23401a3e3653ad9ed4e83ad1c2b06e86100"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:bd9a67cfc83e8453ef17ddd1c2c4ce4a74d448a197764efb54c29f29fb41f611"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7a6eaec2ef658dd650c6eb9b36dff7a361ebd7d8bea990ce9d639b911673b2cb"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-win32.whl", hash = "sha256:d7701769f110332cde45c41759cb2a497de8d2dca55e4c519a46aed5fbb19d1a"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-win_amd64.whl", hash = "sha256:296bf0fd4f678488670e262c87a3e4f91900b942d73ae38caa42a417e53643b1"}, + {file = "rapidfuzz-3.12.2-cp39-cp39-win_arm64.whl", hash = "sha256:7957f5d768de14f6b2715303ccdf224b78416738ee95a028a2965c95f73afbfb"}, + {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5fd3ce849b27d063755829cda27a9dab6dbd63be3801f2a40c60ec563a4c90f"}, + {file = 
"rapidfuzz-3.12.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:54e53662d71ed660c83c5109127c8e30b9e607884b7c45d2aff7929bbbd00589"}, + {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b9e43cf2213e524f3309d329f1ad8dbf658db004ed44f6ae1cd2919aa997da5"}, + {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29ca445e320e5a8df3bd1d75b4fa4ecfa7c681942b9ac65b55168070a1a1960e"}, + {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83eb7ef732c2f8533c6b5fbe69858a722c218acc3e1fc190ab6924a8af7e7e0e"}, + {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:648adc2dd2cf873efc23befcc6e75754e204a409dfa77efd0fea30d08f22ef9d"}, + {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b1e6f48e1ffa0749261ee23a1c6462bdd0be5eac83093f4711de17a42ae78ad"}, + {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:1ae9ded463f2ca4ba1eb762913c5f14c23d2e120739a62b7f4cc102eab32dc90"}, + {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dda45f47b559be72ecbce45c7f71dc7c97b9772630ab0f3286d97d2c3025ab71"}, + {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3745c6443890265513a3c8777f2de4cb897aeb906a406f97741019be8ad5bcc"}, + {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36d3ef4f047ed1bc96fa29289f9e67a637ddca5e4f4d3dc7cb7f50eb33ec1664"}, + {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:54bb69ebe5ca0bd7527357e348f16a4c0c52fe0c2fcc8a041010467dcb8385f7"}, + {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3f2ddd5b99b254039a8c82be5749d4d75943f62eb2c2918acf6ffd586852834f"}, + {file = 
"rapidfuzz-3.12.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8117dab9b26a1aaffab59b4e30f80ac4d55e61ad4139a637c149365960933bee"}, + {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40c0f16d62d6553527de3dab2fb69709c4383430ea44bce8fb4711ed4cbc6ae3"}, + {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f177e1eb6e4f5261a89c475e21bce7a99064a8f217d2336fb897408f46f0ceaf"}, + {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df0cecc2852fcb078ed1b4482fac4fc2c2e7787f3edda8920d9a4c0f51b1c95"}, + {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b3c4df0321df6f8f0b61afbaa2ced9622750ee1e619128db57a18533d139820"}, + {file = "rapidfuzz-3.12.2.tar.gz", hash = "sha256:b0ba1ccc22fff782e7152a3d3d0caca44ec4e32dc48ba01c560b8593965b5aa3"}, +] + +[package.extras] +all = ["numpy"] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "secretstorage" +version = "3.3.3" +description = "Python bindings to FreeDesktop.org Secret Service API" +optional = false +python-versions = ">=3.6" +groups = ["main"] +markers = "sys_platform == \"linux\"" +files = [ + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, +] + +[package.dependencies] +cryptography = ">=2.0" +jeepney = ">=0.6" + +[[package]] +name = "setuptools" +version = "75.8.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "setuptools-75.8.0-py3-none-any.whl", hash = 
"sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, + {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = 
"sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "soupsieve" +version = "2.6" +description = "A modern CSS selector implementation for Beautiful Soup." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, +] + +[[package]] +name = "sse-starlette" +version = "2.2.1" +description = "SSE plugin for Starlette" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99"}, + {file = "sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419"}, +] + +[package.dependencies] +anyio = ">=4.7.0" +starlette = ">=0.41.3" + +[package.extras] +examples = ["fastapi"] +uvicorn = ["uvicorn (>=0.34.0)"] + +[[package]] +name = "starlette" +version = "0.46.1" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227"}, + {file = "starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "taskgroup" +version = "0.2.2" +description = "backport of asyncio.TaskGroup, asyncio.Runner and asyncio.timeout" +optional = false +python-versions = "*" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "taskgroup-0.2.2-py2.py3-none-any.whl", hash = "sha256:e2c53121609f4ae97303e9ea1524304b4de6faf9eb2c9280c7f87976479a52fb"}, + {file = "taskgroup-0.2.2.tar.gz", hash = "sha256:078483ac3e78f2e3f973e2edbf6941374fbea81b9c5d0a96f51d297717f4752d"}, +] + +[package.dependencies] +exceptiongroup = "*" +typing_extensions = ">=4.12.2,<5" + +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version == \"3.10\"" +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = 
"tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + 
{file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "trove-classifiers" +version = "2025.3.19.19" +description = "Canonical source for classifiers on PyPI (pypi.org)." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "trove_classifiers-2025.3.19.19-py3-none-any.whl", hash = "sha256:5fc02770ecd81588a605ac98b9d85d50a5a3f9daa30af2a6b1361a1999d75d07"}, + {file = "trove_classifiers-2025.3.19.19.tar.gz", hash = "sha256:98e9d396fe908d5f43b7454fa4c43d17cd0fdadf046f45fb38a5e3af8d959ecd"}, +] + +[[package]] +name = "types-requests" +version = "2.32.0.20250328" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "types_requests-2.32.0.20250328-py3-none-any.whl", hash = "sha256:72ff80f84b15eb3aa7a8e2625fffb6a93f2ad5a0c20215fc1dcfa61117bcb2a2"}, + {file = "types_requests-2.32.0.20250328.tar.gz", hash = "sha256:c9e67228ea103bd811c96984fac36ed2ae8da87a36a633964a21f199d60baf32"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2024.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main"] +files = [ + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.34.0" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, + {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "virtualenv" +version = "20.29.3" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "virtualenv-20.29.3-py3-none-any.whl", hash = "sha256:3e3d00f5807e83b234dfb6122bf37cfadf4be216c53a49ac059d02414f819170"}, + {file = "virtualenv-20.29.3.tar.gz", hash = "sha256:95e39403fcf3940ac45bc717597dba16110b74506131845d9b687d5e73d947ac"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", 
"time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] + +[[package]] +name = "waitress" +version = "2.1.2" +description = "Waitress WSGI server" +optional = false +python-versions = ">=3.7.0" +groups = ["main"] +files = [ + {file = "waitress-2.1.2-py3-none-any.whl", hash = "sha256:7500c9625927c8ec60f54377d590f67b30c8e70ef4b8894214ac6e4cad233d2a"}, + {file = "waitress-2.1.2.tar.gz", hash = "sha256:780a4082c5fbc0fde6a2fcfe5e26e6efc1e8f425730863c04085769781f51eba"}, +] + +[package.extras] +docs = ["Sphinx (>=1.8.1)", "docutils", "pylons-sphinx-themes (>=1.0.9)"] +testing = ["coverage (>=5.0)", "pytest", "pytest-cover"] + +[[package]] +name = "websockets" +version = "13.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, + {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, + {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, + {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, + {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, + {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, + {file = 
"websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, + {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, + {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, + {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, + {file = "websockets-13.1-cp312-cp312-win32.whl", hash = 
"sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, + {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, + {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, + {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, + {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, + {file = 
"websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"}, + {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"}, + {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"}, + {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"}, + {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, + {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, + {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, + {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, + 
{file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, + {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"}, + {file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, + {file = 
"websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, + {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, + {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, + {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, +] + +[[package]] +name = "werkzeug" +version = "3.1.3" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wheel" +version = "0.44.0" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f"}, + {file = "wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +groups = ["main"] +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + +[[package]] +name = "xattr" +version = "1.1.4" +description = "Python wrapper for extended filesystem attributes" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "xattr-1.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:acb85b6249e9f3ea10cbb56df1021d43f4027212f0d004304bc9075dc7f54769"}, + {file = "xattr-1.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1a848ab125c0fafdc501ccd83b4c9018bba576a037a4ca5960a22f39e295552e"}, + {file = "xattr-1.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:467ee77471d26ae5187ee7081b82175b5ca56ead4b71467ec2e6119d1b08beed"}, + {file = "xattr-1.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fd35f46cb0154f7033f9d5d0960f226857acb0d1e0d71fd7af18ed84663007c"}, + {file = "xattr-1.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d956478e9bb98a1efd20ebc6e5703497c1d2d690d5a13c4df4abf59881eed50"}, + {file = "xattr-1.1.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f25dfdcd974b700fb04a40e14a664a80227ee58e02ea062ac241f0d7dc54b4e"}, + {file = "xattr-1.1.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33b63365c1fcbc80a79f601575bac0d6921732e0245b776876f3db3fcfefe22d"}, + {file = "xattr-1.1.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:544542be95c9b49e211f0a463758f200de88ba6d5a94d3c4f42855a484341acd"}, + {file = "xattr-1.1.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac14c9893f3ea046784b7702be30889b200d31adcd2e6781a8a190b6423f9f2d"}, + {file = "xattr-1.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bb4bbe37ba95542081890dd34fa5347bef4651e276647adaa802d5d0d7d86452"}, + {file = "xattr-1.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3da489ecef798705f9a39ea8cea4ead0d1eeed55f92c345add89740bd930bab6"}, + {file = "xattr-1.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:798dd0cbe696635a6f74b06fc430818bf9c3b24314e1502eadf67027ab60c9b0"}, + {file = "xattr-1.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2b6361626efad5eb5a6bf8172c6c67339e09397ee8140ec41258737bea9681"}, + {file = "xattr-1.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7fa20a0c9ce022d19123b1c5b848d00a68b837251835a7929fe041ee81dcd0"}, + {file = 
"xattr-1.1.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e20eeb08e2c57fc7e71f050b1cfae35cbb46105449853a582bf53fd23c5379e"}, + {file = "xattr-1.1.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:477370e75821bded901487e5e752cffe554d1bd3bd4839b627d4d1ee8c95a093"}, + {file = "xattr-1.1.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a8682091cd34a9f4a93c8aaea4101aae99f1506e24da00a3cc3dd2eca9566f21"}, + {file = "xattr-1.1.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2e079b3b1a274ba2121cf0da38bbe5c8d2fb1cc49ecbceb395ce20eb7d69556d"}, + {file = "xattr-1.1.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ae6579dea05bf9f335a082f711d5924a98da563cac72a2d550f5b940c401c0e9"}, + {file = "xattr-1.1.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd6038ec9df2e67af23c212693751481d5f7e858156924f14340376c48ed9ac7"}, + {file = "xattr-1.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:608b2877526674eb15df4150ef4b70b7b292ae00e65aecaae2f192af224be200"}, + {file = "xattr-1.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54dad1a6a998c6a23edfd25e99f4d38e9b942d54e518570044edf8c767687ea"}, + {file = "xattr-1.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0dab6ff72bb2b508f3850c368f8e53bd706585012676e1f71debba3310acde8"}, + {file = "xattr-1.1.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a3c54c6af7cf09432b2c461af257d5f4b1cb2d59eee045f91bacef44421a46d"}, + {file = "xattr-1.1.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e346e05a158d554639fbf7a0db169dc693c2d2260c7acb3239448f1ff4a9d67f"}, + {file = "xattr-1.1.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3ff6d9e2103d0d6e5fcd65b85a2005b66ea81c0720a37036445faadc5bbfa424"}, + {file = "xattr-1.1.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:7a2ee4563c6414dfec0d1ac610f59d39d5220531ae06373eeb1a06ee37cd193f"}, + {file = "xattr-1.1.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878df1b38cfdadf3184ad8c7b0f516311128d5597b60ac0b3486948953658a83"}, + {file = "xattr-1.1.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0c9b8350244a1c5454f93a8d572628ff71d7e2fc2f7480dcf4c4f0e8af3150fe"}, + {file = "xattr-1.1.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a46bf48fb662b8bd745b78bef1074a1e08f41a531168de62b5d7bd331dadb11a"}, + {file = "xattr-1.1.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83fc3c07b583777b1dda6355329f75ca6b7179fe0d1002f1afe0ef96f7e3b5de"}, + {file = "xattr-1.1.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6308b19cff71441513258699f0538394fad5d66e1d324635207a97cb076fd439"}, + {file = "xattr-1.1.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48c00ddc15ddadc9c729cd9504dabf50adb3d9c28f647d4ac9a3df45a046b1a0"}, + {file = "xattr-1.1.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a06136196f26293758e1b244200b73156a0274af9a7349fa201c71c7af3bb9e8"}, + {file = "xattr-1.1.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8fc2631a3c6cfcdc71f7f0f847461839963754e76a2015de71e7e71e3304abc0"}, + {file = "xattr-1.1.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d6e1e835f9c938d129dd45e7eb52ebf7d2d6816323dab93ce311bf331f7d2328"}, + {file = "xattr-1.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:60dea2d369a6484e8b7136224fc2971e10e2c46340d83ab780924afe78c90066"}, + {file = "xattr-1.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:85c2b778b09d919523f80f244d799a142302582d76da18903dc693207c4020b0"}, + {file = "xattr-1.1.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ee0abba9e1b890d39141714ff43e9666864ca635ea8a5a2194d989e6b17fe862"}, + {file = 
"xattr-1.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e4174ba7f51f46b95ea7918d907c91cd579575d59e6a2f22ca36a0551026737"}, + {file = "xattr-1.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2b05e52e99d82d87528c54c2c5c8c5fb0ba435f85ac6545511aeea136e49925"}, + {file = "xattr-1.1.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a3696fad746be37de34eb73c60ea67144162bd08106a5308a90ce9dea9a3287"}, + {file = "xattr-1.1.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a3a7149439a26b68904c14fdc4587cde4ac7d80303e9ff0fefcfd893b698c976"}, + {file = "xattr-1.1.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:507b36a126ce900dbfa35d4e2c2db92570c933294cba5d161ecd6a89f7b52f43"}, + {file = "xattr-1.1.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9392b417b54923e031041940d396b1d709df1d3779c6744454e1f1c1f4dad4f5"}, + {file = "xattr-1.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e9f00315e6c02943893b77f544776b49c756ac76960bea7cb8d7e1b96aefc284"}, + {file = "xattr-1.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c8f98775065260140efb348b1ff8d50fd66ddcbf0c685b76eb1e87b380aaffb3"}, + {file = "xattr-1.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b471c6a515f434a167ca16c5c15ff34ee42d11956baa749173a8a4e385ff23e7"}, + {file = "xattr-1.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee0763a1b7ceb78ba2f78bee5f30d1551dc26daafcce4ac125115fa1def20519"}, + {file = "xattr-1.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:099e6e9ce7999b403d36d9cf943105a3d25d8233486b54ec9d1b78623b050433"}, + {file = "xattr-1.1.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e56faef9dde8d969f0d646fb6171883693f88ae39163ecd919ec707fbafa85"}, + {file = 
"xattr-1.1.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:328156d4e594c9ae63e1072503c168849e601a153ad37f0290743544332d6b6f"}, + {file = "xattr-1.1.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a57a55a27c7864d6916344c9a91776afda6c3b8b2209f8a69b79cdba93fbe128"}, + {file = "xattr-1.1.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3c19cdde08b040df1e99d2500bf8a9cff775ab0e6fa162bf8afe6d84aa93ed04"}, + {file = "xattr-1.1.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c72667f19d3a9acf324aed97f58861d398d87e42314731e7c6ab3ac7850c971"}, + {file = "xattr-1.1.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:67ae934d75ea2563fc48a27c5945749575c74a6de19fdd38390917ddcb0e4f24"}, + {file = "xattr-1.1.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1b0c348dd8523554dc535540d2046c0c8a535bb086561d8359f3667967b6ca"}, + {file = "xattr-1.1.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22284255d2a8e8f3da195bd8e8d43ce674dbc7c38d38cb6ecfb37fae7755d31f"}, + {file = "xattr-1.1.4-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b38aac5ef4381c26d3ce147ca98fba5a78b1e5bcd6be6755b4908659f2705c6d"}, + {file = "xattr-1.1.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:803f864af528f6f763a5be1e7b1ccab418e55ae0e4abc8bda961d162f850c991"}, + {file = "xattr-1.1.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:40354ebfb5cecd60a5fbb9833a8a452d147486b0ffec547823658556625d98b5"}, + {file = "xattr-1.1.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2abaf5d06be3361bfa8e0db2ee123ba8e92beab5bceed5e9d7847f2145a32e04"}, + {file = "xattr-1.1.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e638e5ffedc3565242b5fa3296899d35161bad771f88d66277b58f03a1ba9fe"}, + {file = 
"xattr-1.1.4-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0597e919d116ec39997804288d77bec3777228368efc0f2294b84a527fc4f9c2"}, + {file = "xattr-1.1.4-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee9455c501d19f065527afda974418b3ef7c61e85d9519d122cd6eb3cb7a00"}, + {file = "xattr-1.1.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:89ed62ce430f5789e15cfc1ccabc172fd8b349c3a17c52d9e6c64ecedf08c265"}, + {file = "xattr-1.1.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b824f4b9259cd8bb6e83c4873cf8bf080f6e4fa034a02fe778e07aba8d345"}, + {file = "xattr-1.1.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fba66faa0016dfc0af3dd7ac5782b5786a1dfb851f9f3455e266f94c2a05a04"}, + {file = "xattr-1.1.4-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ec4b0c3e0a7bcd103f3cf31dd40c349940b2d4223ce43d384a3548992138ef1"}, + {file = "xattr-1.1.4.tar.gz", hash = "sha256:b7b02ecb2270da5b7e7deaeea8f8b528c17368401c2b9d5f63e91f545b45d372"}, +] + +[package.dependencies] +cffi = ">=1.16.0" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "yarl" +version = "1.18.3" +description = "Yet another URL library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"}, + {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"}, + {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"}, + {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"}, + {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"}, + {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"}, + {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"}, + {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"}, + {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"}, + {file = 
"yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"}, + {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"}, + {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"}, + {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"}, + {file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"}, + {file 
= "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"}, + {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"}, + {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"}, + {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"}, + {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"}, + {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"}, + {file = 
"yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"}, + {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"}, + {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"}, + {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"}, + {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"}, + {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"}, + {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"}, + {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"}, + {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"}, + {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"}, + {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"}, +] + +[package.dependencies] +idna 
= ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.0" + +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.10\" or python_version == \"3.11\"" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[[package]] +name = "zstandard" +version = "0.23.0" +description = "Zstandard bindings for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, + {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c"}, + {file = "zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813"}, + {file = "zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473"}, + {file = "zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160"}, + {file = "zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba"}, + {file = 
"zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35"}, + {file = "zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d"}, + {file = "zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33"}, + {file = "zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd"}, + {file = "zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ef3775758346d9ac6214123887d25c7061c92afe1f2b354f9388e9e4d48acfc"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4051e406288b8cdbb993798b9a45c59a4896b6ecee2f875424ec10276a895740"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2d1a054f8f0a191004675755448d12be47fa9bebbcffa3cdf01db19f2d30a54"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f83fa6cae3fff8e98691248c9320356971b59678a17f20656a9e59cd32cee6d8"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32ba3b5ccde2d581b1e6aa952c836a6291e8435d788f656fe5976445865ae045"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f146f50723defec2975fb7e388ae3a024eb7151542d1599527ec2aa9cacb152"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1bfe8de1da6d104f15a60d4a8a768288f66aa953bbe00d027398b93fb9680b26"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:29a2bc7c1b09b0af938b7a8343174b987ae021705acabcbae560166567f5a8db"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61f89436cbfede4bc4e91b4397eaa3e2108ebe96d05e93d6ccc95ab5714be512"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53ea7cdc96c6eb56e76bb06894bcfb5dfa93b7adcf59d61c6b92674e24e2dd5e"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a4ae99c57668ca1e78597d8b06d5af837f377f340f4cce993b551b2d7731778d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:379b378ae694ba78cef921581ebd420c938936a153ded602c4fea612b7eaa90d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:50a80baba0285386f97ea36239855f6020ce452456605f262b2d33ac35c7770b"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:61062387ad820c654b6a6b5f0b94484fa19515e0c5116faf29f41a6bc91ded6e"}, + {file = "zstandard-0.23.0-cp38-cp38-win32.whl", hash = "sha256:b8c0bd73aeac689beacd4e7667d48c299f61b959475cdbb91e7d3d88d27c56b9"}, + {file = "zstandard-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:a05e6d6218461eb1b4771d973728f0133b2a4613a6779995df557f70794fd60f"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2"}, + {file 
= "zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5"}, + {file = "zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274"}, + {file = "zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58"}, + {file = "zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09"}, +] + +[package.dependencies] +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} + +[package.extras] +cffi = ["cffi (>=1.11)"] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.10,<4.0" +content-hash = "9b132012b1e894f31b66796668c874f0c81ca3077c67e12878b00ccc3e8242ac" diff --git a/rowboat_agents/pyproject.toml b/rowboat_agents/pyproject.toml new file mode 100644 index 000000000..11edf8beb --- /dev/null +++ b/rowboat_agents/pyproject.toml @@ -0,0 +1,110 @@ +[tool.poetry] +name = "agents" +version = "0.1.0" +description = "RowBoat Labs Agent OS" +authors = ["Akhilesh "] +license = "MIT" +readme = "README.md" +homepage = "https://github.com/rowboatlabs/agents" +package-mode = false + +[tool.poetry.dependencies] +python = ">=3.10,<4.0" + +# Dependencies +aiohttp = "^3.9.3" +annotated-types = "^0.7.0" +anyio = "^4.8.0" +asgiref = "*" +beautifulsoup4 = "^4.12.3" +blinker = "^1.9.0" +build = "1.2.2.post1" +CacheControl = "^0.14.2" +certifi = "^2024.12.14" +cffi = "^1.17.1" +charset-normalizer = "^3.4.1" +cleo = "^2.1.0" +click = "^8.1.8" +crashtest = "^0.4.1" +distlib = "^0.3.9" +distro = "^1.9.0" +dnspython = "^2.7.0" +dulwich = "^0.22.7" +et_xmlfile = "^2.0.0" +eval_type_backport = "^0.2.2" +fastjsonschema = "^2.21.1" +filelock = "^3.17.0" +firecrawl = "^1.9.0" +Flask = "^3.1.0" +gunicorn = "^23.0.0" +h11 = "^0.14.0" +httpcore = "^1.0.7" +httpx = "^0.27.2" +hypercorn = "*" +idna = "^3.10" +installer = 
"^0.7.0" +itsdangerous = "^2.2.0" +"jaraco.classes" = "^3.4.0" +"jaraco.context" = "^6.0.1" +"jaraco.functools" = "^4.1.0" +Jinja2 = "^3.1.5" +jiter = "^0.6.1" +jsonpath-python = "^1.0.6" +keyring = "^25.6.0" +lxml = "^5.3.0" +markdownify = "^0.13.1" +MarkupSafe = "^3.0.2" +mcp = "*" +more-itertools = "^10.6.0" +motor = "*" +msgpack = "^1.1.0" +mypy-extensions = "^1.0.0" +nest-asyncio = "^1.6.0" +numpy = "^2.2.1" +openai = "*" +openpyxl = "^3.1.5" +packaging = "^24.2" +pandas = "^2.2.3" +pkginfo = "^1.12.0" +platformdirs = "^4.3.6" +poetry = "^2.0.1" +poetry-core = "^2.0.1" +pycparser = "^2.22" +pydantic = "^2.10.5" +pydantic_core = "^2.27.2" +PyJWT = "^2.10.1" +pymongo = "^4.10.1" +pyproject_hooks = "^1.2.0" +python-dateutil = "^2.9.0.post0" +python-docx = "^1.1.2" +python-dotenv = "^1.0.1" +pytz = "^2024.2" +qdrant-client = "*" +Quart = "^0.20.0" +RapidFuzz = "^3.11.0" +requests = "^2.32.3" +requests-toolbelt = "^1.0.0" +setuptools = "^75.8.0" +shellingham = "^1.5.4" +six = "^1.17.0" +sniffio = "^1.3.1" +soupsieve = "^2.6" +tabulate = "^0.9.0" +tomlkit = "^0.13.2" +tqdm = "^4.67.1" +trove-classifiers = "^2025.1.15.22" +typing-inspect = "^0.9.0" +typing_extensions = "^4.12.2" +tzdata = "^2024.2" +urllib3 = "^2.3.0" +virtualenv = "^20.29.1" +waitress = "^2.1.2" +websockets = "^13.1" +Werkzeug = "^3.1.3" +wheel = "^0.44.0" +xattr = "^1.1.4" +openai-agents = "^0.0.13" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/rowboat_agents/requirements.txt b/rowboat_agents/requirements.txt new file mode 100644 index 000000000..29063f1e9 --- /dev/null +++ b/rowboat_agents/requirements.txt @@ -0,0 +1,153 @@ +aiofiles==24.1.0 +aiohappyeyeballs==2.6.1 +aiohttp==3.11.14 +aiosignal==1.3.2 +annotated-types==0.7.0 +anyio==4.8.0 +asgiref==3.8.1 +attrs==25.3.0 +babel==2.16.0 +beautifulsoup4==4.12.3 +blinker==1.9.0 +build==1.2.2.post1 +CacheControl==0.14.2 +certifi==2024.12.14 +cffi==1.17.1 +charset-normalizer==3.4.1 +cleo==2.1.0 
+click==8.1.8 +colorama==0.4.6 +crashtest==0.4.1 +distlib==0.3.9 +distro==1.9.0 +dnspython==2.7.0 +dulwich==0.22.8 +et_xmlfile==2.0.0 +eval_type_backport==0.2.2 +fastjsonschema==2.21.1 +filelock==3.18.0 +findpython==0.6.3 +firecrawl==1.9.0 +Flask==3.1.0 +frozenlist==1.5.0 +fsspec==2025.3.2 +ghp-import==2.1.0 +griffe==1.7.3 +grpcio==1.71.0 +grpcio-tools==1.71.0 +gunicorn==23.0.0 +h11==0.14.0 +h2==4.2.0 +hpack==4.1.0 +httpcore==1.0.7 +httpx==0.27.2 +httpx-sse==0.4.0 +huggingface-hub==0.30.2 +Hypercorn==0.17.3 +hyperframe==6.1.0 +idna==3.10 +importlib_metadata==8.6.1 +installer==0.7.0 +itsdangerous==2.2.0 +jaraco.classes==3.4.0 +jaraco.context==6.0.1 +jaraco.functools==4.1.0 +Jinja2==3.1.5 +jiter==0.6.1 +jsonpath-python==1.0.6 +jsonschema==4.23.0 +jsonschema-specifications==2025.4.1 +keyring==25.6.0 +litellm==1.67.2 +lxml==5.3.0 +Markdown==3.7 +markdownify==0.13.1 +MarkupSafe==3.0.2 +mcp==1.6.0 +mergedeep==1.3.4 +mistralai==1.2.3 +mkdocs==1.6.1 +mkdocs-get-deps==0.2.0 +mkdocs-material==9.5.50 +mkdocs-material-extensions==1.3.1 +more-itertools==10.6.0 +motor==3.7.0 +msgpack==1.1.0 +multidict==6.2.0 +mypy-extensions==1.0.0 +nest-asyncio==1.6.0 +numpy==2.2.1 +openai==1.76.0 +openai-agents==0.0.13 +openpyxl==3.1.5 +packaging==24.2 +paginate==0.5.7 +pandas==2.2.3 +pathspec==0.12.1 +pbs-installer==2025.3.17 +pkginfo==1.12.1.2 +platformdirs==4.3.7 +poetry==2.1.1 +poetry-core==2.1.1 +portalocker==2.10.1 +priority==2.0.0 +propcache==0.3.0 +protobuf==5.29.4 +pycparser==2.22 +pydantic==2.10.5 +pydantic-settings==2.8.1 +pydantic_core==2.27.2 +Pygments==2.19.1 +PyJWT==2.10.1 +pymdown-extensions==10.14.1 +pymongo==4.10.1 +pyproject_hooks==1.2.0 +python-dateutil==2.9.0.post0 +python-docx==1.1.2 +python-dotenv==1.0.1 +pytz==2024.2 +PyYAML==6.0.2 +pyyaml_env_tag==0.1 +qdrant-client==1.13.3 +Quart==0.20.0 +RapidFuzz==3.12.2 +redis==5.2.1 +referencing==0.36.2 +regex==2024.11.6 +requests==2.32.3 +requests-toolbelt==1.0.0 +rpds-py==0.24.0 +setuptools==75.8.0 +shellingham==1.5.4 
+six==1.17.0 +sniffio==1.3.1 +sounddevice==0.5.1 +soupsieve==2.6 +sse-starlette==2.2.1 +sseclient==0.0.27 +sseclient-py==1.8.0 +starlette==0.46.1 +tabulate==0.9.0 +tiktoken==0.9.0 +tokenizers==0.21.1 +tomlkit==0.13.2 +tqdm==4.67.1 +trove-classifiers==2025.3.19.19 +types-requests==2.32.0.20250328 +typing-inspect==0.9.0 +typing-inspection==0.4.0 +typing_extensions==4.12.2 +tzdata==2024.2 +urllib3==2.3.0 +uvicorn==0.34.0 +virtualenv==20.29.3 +waitress==2.1.2 +watchdog==6.0.0 +websockets==13.1 +Werkzeug==3.1.3 +wheel==0.44.0 +wsproto==1.2.0 +xattr==1.1.4 +yarl==1.18.3 +zipp==3.21.0 +zstandard==0.23.0 diff --git a/rowboat_agents/src/__init__.py b/rowboat_agents/src/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/rowboat_agents/src/app/__init__.py b/rowboat_agents/src/app/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/rowboat_agents/src/app/main.py b/rowboat_agents/src/app/main.py new file mode 100644 index 000000000..81b5922d3 --- /dev/null +++ b/rowboat_agents/src/app/main.py @@ -0,0 +1,184 @@ +import traceback +from quart import Quart, request, jsonify, Response +from functools import wraps +import os +import json +from hypercorn.config import Config +from hypercorn.asyncio import serve +import asyncio + +from src.graph.core import run_turn_streamed +from src.utils.common import read_json_from_file + +app = Quart(__name__) +master_config = read_json_from_file("./configs/default_config.json") +print("Master config:", master_config) + +# Get environment variables with defaults +ENABLE_TRACING = False +if os.environ.get('ENABLE_TRACING'): + ENABLE_TRACING = os.environ.get('ENABLE_TRACING').lower() == 'true' + +# filter out agent transfer messages using a function +def is_agent_transfer_message(msg): + if (msg.get("role") == "assistant" and + msg.get("content") is None and + msg.get("tool_calls") is not None and + len(msg.get("tool_calls")) > 0 and + msg.get("tool_calls")[0].get("function").get("name") == 
"transfer_to_agent"): + return True + if (msg.get("role") == "tool" and + msg.get("tool_calls") is None and + msg.get("tool_call_id") is not None and + msg.get("tool_name") == "transfer_to_agent"): + return True + return False + +@app.route("/health", methods=["GET"]) +async def health(): + return jsonify({"status": "ok"}) + +@app.route("/") +async def home(): + return "Hello, World!" + +def require_api_key(f): + @wraps(f) + async def decorated(*args, **kwargs): + auth_header = request.headers.get('Authorization') + if not auth_header or not auth_header.startswith('Bearer '): + return jsonify({'error': 'Missing or invalid authorization header'}), 401 + + token = auth_header.split('Bearer ')[1] + actual = os.environ.get('API_KEY', '').strip() + if actual and token != actual: + return jsonify({'error': 'Invalid API key'}), 403 + + return await f(*args, **kwargs) + return decorated + +@app.route("/chat", methods=["POST"]) +@require_api_key +async def chat(): + print('='*100) + print(f"{'*'*100}Running server mode{'*'*100}") + try: + request_data = await request.get_json() + print("Request:", json.dumps(request_data)) + + # filter out agent transfer messages + input_messages = [msg for msg in request_data["messages"] if not is_agent_transfer_message(msg)] + + # Preprocess messages to handle null content and role issues + for msg in input_messages: + if (msg.get("role") == "assistant" and + msg.get("content") is None and + msg.get("tool_calls") is not None and + len(msg.get("tool_calls")) > 0): + msg["content"] = "Calling tool" + + if msg.get("role") == "tool": + msg["role"] = "developer" + elif not msg.get("role"): + msg["role"] = "user" + + data = request_data + messages = [] + final_state = {} + + async for event_type, event_data in run_turn_streamed( + messages=input_messages, + start_agent_name=data.get("startAgent", ""), + agent_configs=data.get("agents", []), + tool_configs=data.get("tools", []), + prompt_configs=data.get("prompts", []), + 
start_turn_with_start_agent=master_config.get("start_turn_with_start_agent", False), + state=data.get("state", {}), + complete_request=data, + enable_tracing=ENABLE_TRACING + ): + if event_type == 'message': + messages.append(event_data) + elif event_type == 'done': + final_state = event_data['state'] + + out = { + "messages": messages, + "state": final_state, + } + + print("Output:") + for k, v in out.items(): + print(f"{k}: {v}") + print('*'*100) + + return jsonify(out) + + except Exception as e: + print(traceback.format_exc()) + print(f"Error: {str(e)}") + return jsonify({"error": str(e)}), 500 + +def format_sse(data: dict, event: str = None) -> str: + msg = f"data: {json.dumps(data)}\n\n" + if event is not None: + msg = f"event: {event}\n{msg}" + return msg + +@app.route("/chat_stream", methods=["POST"]) +@require_api_key +async def chat_stream(): + # get the request data from the request + request_data = await request.get_data() + + print("Request:", request_data.decode('utf-8')) + request_data = json.loads(request_data) + + # filter out agent transfer messages + input_messages = [msg for msg in request_data["messages"] if not is_agent_transfer_message(msg)] + + # Preprocess messages to handle null content and role issues + for msg in input_messages: + if (msg.get("role") == "assistant" and + msg.get("content") is None and + msg.get("tool_calls") is not None and + len(msg.get("tool_calls")) > 0): + msg["content"] = "Calling tool" + + if msg.get("role") == "tool": + msg["role"] = "developer" + elif not msg.get("role"): + msg["role"] = "user" + + async def generate(): + print("Running generate() in server") + try: + async for event_type, event_data in run_turn_streamed( + messages=input_messages, + start_agent_name=request_data.get("startAgent", ""), + agent_configs=request_data.get("agents", []), + tool_configs=request_data.get("tools", []), + prompt_configs=request_data.get("prompts", []), + 
start_turn_with_start_agent=master_config.get("start_turn_with_start_agent", False), + state=request_data.get("state", {}), + complete_request=request_data, + enable_tracing=ENABLE_TRACING + ): + if event_type == 'message': + yield format_sse(event_data, "message") + elif event_type == 'done': + yield format_sse(event_data, "done") + elif event_type == 'error': + yield format_sse(event_data, " error") + + except Exception as e: + print(f"Streaming error: {str(e)}") + yield format_sse({"error": str(e)}, "error") + + return Response(generate(), mimetype='text/event-stream') + +if __name__ == "__main__": + print("Starting async server...") + config = Config() + config.bind = ["0.0.0.0:4040"] + asyncio.run(serve(app, config)) \ No newline at end of file diff --git a/rowboat_agents/src/graph/__init__.py b/rowboat_agents/src/graph/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/rowboat_agents/src/graph/core.py b/rowboat_agents/src/graph/core.py new file mode 100644 index 000000000..ca177df34 --- /dev/null +++ b/rowboat_agents/src/graph/core.py @@ -0,0 +1,647 @@ +import traceback +from copy import deepcopy +from datetime import datetime +import json +import uuid +import logging +from .helpers.access import ( + get_agent_by_name, + get_external_tools, + get_prompt_by_type, + get_agent_config_by_name +) +from .helpers.library_tools import handle_web_search_event +from .helpers.control import get_last_agent_name +from .execute_turn import run_streamed as swarm_run_streamed, get_agents, call_mcp +from .helpers.instructions import add_child_transfer_related_instructions +from .types import PromptType, outputVisibility, ResponseType +from agents.extensions.handoff_prompt import RECOMMENDED_PROMPT_PREFIX + +logger = logging.getLogger(__name__) + +def order_messages(messages): + """ + Sorts each message's keys in a specified order and returns a new list of ordered messages. 
+ """ + ordered_messages = [] + for msg in messages: + # Filter out None values + msg = {k: v for k, v in msg.items() if v is not None} + + # Specify the exact order + ordered = {} + for key in ['role', 'sender', 'content', 'created_at', 'timestamp']: + if key in msg: + ordered[key] = msg[key] + + # Add remaining keys in alphabetical order + remaining_keys = sorted(k for k in msg if k not in ordered) + for key in remaining_keys: + ordered[key] = msg[key] + + ordered_messages.append(ordered) + return ordered_messages + +def set_sys_message(messages): + """ + If the system message is empty, set it to the default message: "You are a helplful assistant." + """ + if messages[0].get("role") == "system" and messages[0].get("content") == "": + messages[0]["content"] = "You are a helpful assistant." + print("Updated system message: ", messages[0]) + + return messages + +def add_child_transfer_related_instructions_to_agents(agents): + for agent in agents: + add_child_transfer_related_instructions(agent) + return agents + +def add_openai_recommended_instructions_to_agents(agents): + for agent in agents: + agent.instructions = RECOMMENDED_PROMPT_PREFIX + '\n\n' + agent.instructions + return agents + +def check_internal_visibility(current_agent): + """Check if an agent is internal based on its outputVisibility""" + return current_agent.output_visibility == outputVisibility.INTERNAL.value + +def add_sender_details_to_messages(messages): + for msg in messages: + msg['sender'] = msg.get('sender', None) + if msg.get('sender'): + msg['content'] = f"Sender agent: {msg.get('sender')}\nContent: {msg.get('content')}" + return messages + +def append_messages(messages, accumulated_messages): + # Create a set of existing message identifiers for O(1) lookup + # For tool messages, use both content and tool_call_id + # For other messages, just use content + existing_messages = set() + for msg in messages: + if msg.get('role') == 'tool': + existing_messages.add((msg.get('content'), 
msg.get('tool_call_id'))) + else: + existing_messages.add((msg.get('content'), None)) + + # Append messages that aren't already present, preserving order + for msg in accumulated_messages: + msg_id = (msg.get('content'), msg.get('tool_call_id') if msg.get('role') == 'tool' else None) + if msg_id not in existing_messages: + messages.append(msg) + existing_messages.add(msg_id) + + return messages + +async def run_turn_streamed( + messages, + start_agent_name, + agent_configs, + tool_configs, + prompt_configs, + start_turn_with_start_agent, + state={}, + complete_request={}, + enable_tracing=None +): + """ + Run a turn of the conversation with streaming responses. + + A turn consists of all messages between user inputs and must follow these rules: + 1. Each turn must have exactly one external message from an agent with external visibility + 2. A turn can have multiple internal messages from internal agents + 3. Each agent can output at most one regular message per parent + 4. Control flows from parent to child, and child must return to parent after responding + 5. 
Turn ends when an external agent outputs a message + """ + print("\n=== Starting new turn ===") + print(f"Starting agent: {start_agent_name}") + + # Use enable_tracing from complete_request if available, otherwise default to False + enable_tracing = complete_request.get("enable_tracing", False) if enable_tracing is None else enable_tracing + + # Add complete_request to the system message for tool configuration + if messages and messages[0].get("role") == "system": + messages[0]["complete_request"] = complete_request + + messages = set_sys_message(messages) + messages = add_sender_details_to_messages(messages) + is_greeting_turn = not any(msg.get("role") != "system" for msg in messages) + final_state = None + accumulated_messages = [] + agent_message_counts = {} # Track messages per agent + child_call_counts = {} # Track parent->child calls + current_agent = None + parent_stack = [] + + try: + # Handle greeting turn + if is_greeting_turn: + greeting_prompt = get_prompt_by_type(prompt_configs, PromptType.GREETING) or "How can I help you today?" 
+ message = { + 'content': greeting_prompt, + 'role': 'assistant', + 'sender': start_agent_name, + 'tool_calls': None, + 'tool_call_id': None, + 'tool_name': None, + 'response_type': ResponseType.EXTERNAL.value + } + accumulated_messages.append(message) + print('-'*100) + print(f"Yielding message: {message}") + print('-'*100) + yield ('message', message) + final_state = { + "last_agent_name": start_agent_name, + "tokens": {"total": 0, "prompt": 0, "completion": 0}, + "turn_messages": accumulated_messages + } + print('-'*100) + print(f"Yielding done: {final_state}") + print('-'*100) + yield ('done', {'state': final_state}) + return + + # Initialize agents and get external tools + new_agents = get_agents( + agent_configs=agent_configs, + tool_configs=tool_configs, + complete_request=complete_request + ) + new_agents = add_child_transfer_related_instructions_to_agents(new_agents) + new_agents = add_openai_recommended_instructions_to_agents(new_agents) + last_agent_name = get_last_agent_name( + state=state, + agent_configs=agent_configs, + start_agent_name=start_agent_name, + msg_type="user", + latest_assistant_msg=None, + start_turn_with_start_agent=start_turn_with_start_agent + ) + current_agent = get_agent_by_name(last_agent_name, new_agents) + external_tools = get_external_tools(tool_configs) + + # Ensure external_tools contains proper tool objects with MCP configuration + if external_tools and isinstance(external_tools[0], str): + print("Converting string tools to tool objects") + tool_objects = [] + for tool_name in external_tools: + tool_config = next((t for t in tool_configs if t["name"] == tool_name), None) + if tool_config: + # Create a proper async tool object + class AsyncTool: + def __init__(self, name, description, is_mcp, mcp_server_name): + self.name = name + self.description = description + self.is_mcp = is_mcp + self.mcp_server_name = mcp_server_name + + async def execute(self, **kwargs): + if self.is_mcp: + mcp_servers = kwargs.get("mcp_servers", []) 
+ if not mcp_servers: + raise ValueError("No MCP servers configured") + + mcp_server = next((server for server in mcp_servers + if server.get("name") == self.mcp_server_name), None) + if not mcp_server: + raise ValueError(f"No MCP server found with name: {self.mcp_server_name}") + + mcp_server_url = mcp_server.get("url") + if not mcp_server_url: + raise ValueError(f"No URL configured for MCP server: {self.mcp_server_name}") + + # Remove mcp_servers from kwargs before passing to call_mcp + tool_args = {k: v for k, v in kwargs.items() if k != "mcp_servers"} + + # Call the MCP server + try: + result = await call_mcp(self.name, json.dumps(tool_args), mcp_server_url) + return result + except Exception as e: + print(f"Error in call_mcp: {str(e)}") + raise + else: + raise ValueError(f"Tool {self.name} is not configured as an MCP tool") + + tool = AsyncTool( + name=tool_name, + description=tool_config.get("description", ""), + is_mcp=tool_config.get("isMcp", False), + mcp_server_name=tool_config.get("mcpServerName") + ) + tool_objects.append(tool) + external_tools = tool_objects + + tokens_used = {"total": 0, "prompt": 0, "completion": 0} + iter = 0 + while True: + iter += 1 + is_internal_agent = check_internal_visibility(current_agent) + print('-'*100) + print(f"Iteration {iter} of turn loop") + print(f"Current agent: {current_agent.name} (internal: {is_internal_agent})") + print(f"Parent stack: {[agent.name for agent in parent_stack]}") + print('-'*100) + + messages = append_messages(messages, accumulated_messages) + # Run the current agent + stream_result = await swarm_run_streamed( + agent=current_agent, + messages=messages, + external_tools=external_tools, + tokens_used=tokens_used, + enable_tracing=enable_tracing + ) + + async for event in stream_result.stream_events(): + try: + # Handle web search events + if event.type == "raw_response_event": + # Handle token usage counting + if hasattr(event.data, 'type') and event.data.type == "response.completed" and 
hasattr(event.data.response, 'usage'): + try: + tokens_used["total"] += event.data.response.usage.total_tokens + tokens_used["prompt"] += event.data.response.usage.input_tokens + tokens_used["completion"] += event.data.response.usage.output_tokens + print('-'*50) + print(f"Found usage information. Updated cumulative tokens: {tokens_used}") + print('-'*50) + except Exception as e: + print(f"Warning: Tokens used is likely not available for your chosen model: {e}") + + web_search_messages = handle_web_search_event(event, current_agent) + for message in web_search_messages: + message['response_type'] = ResponseType.INTERNAL.value + print('-'*100) + print(f"Yielding message: {message}") + print('-'*100) + yield ('message', message) + if message.get('role') != 'tool': + message['content'] = f"Sender agent: {current_agent.name}\nContent: {message['content']}" + accumulated_messages.append(message) + continue + + # Handle agent transfer + elif event.type == "agent_updated_stream_event": + # Skip self-transfers + if current_agent.name == event.new_agent.name: + print(f"\nSkipping agent transfer attempt: {current_agent.name} -> {event.new_agent.name} (self-transfer)") + continue + + # Check if we've already called this child agent too many times + parent_child_key = f"{current_agent.name}:{event.new_agent.name}" + current_count = child_call_counts.get(parent_child_key, 0) + if current_count >= event.new_agent.max_calls_per_parent_agent: + print(f"Skipping transfer from {current_agent.name} to {event.new_agent.name} (max calls reached from parent to child)") + continue + + # Transfer to new agent + tool_call_id = str(uuid.uuid4()) + message = { + 'content': None, + 'role': 'assistant', + 'sender': current_agent.name, + 'tool_calls': [{ + 'function': { + 'name': 'transfer_to_agent', + 'arguments': json.dumps({ + 'assistant': event.new_agent.name + }) + }, + 'id': tool_call_id, + 'type': 'function' + }], + 'tool_call_id': None, + 'tool_name': None, + 'response_type': 
ResponseType.INTERNAL.value + } + print('-'*100) + print(f"Yielding message: {message}") + print('-'*100) + yield ('message', message) + + # Record transfer result + message = { + 'content': json.dumps({ + 'assistant': event.new_agent.name + }), + 'role': 'tool', # Changed from 'function' to 'tool' + 'name': 'transfer_to_agent', + 'sender': None, + 'tool_calls': None, + 'tool_call_id': tool_call_id, + 'tool_name': 'transfer_to_agent' + } + print('-'*100) + print(f"Yielding message: {message}") + print('-'*100) + yield ('message', message) + + # Update tracking and switch to child + if check_internal_visibility(event.new_agent): + child_call_counts[parent_child_key] = current_count + 1 + parent_stack.append(current_agent) + current_agent = event.new_agent + + # Handle regular messages and tool calls + elif event.type == "run_item_stream_event": + if event.item.type == "tool_call_item": + # Check if it's a web search call + if hasattr(event.item.raw_item, 'type') and event.item.raw_item.type == 'web_search_call': + web_search_messages = handle_web_search_event(event, current_agent) + for message in web_search_messages: + message['response_type'] = ResponseType.INTERNAL.value + print('-'*100) + print(f"Yielding message: {message}") + print('-'*100) + yield ('message', message) + if message.get('role') != 'tool': + message['content'] = f"Sender agent: {current_agent.name}\nContent: {message['content']}" + accumulated_messages.append(message) + continue + + # Handle regular tool calls + message = { + 'content': None, + 'role': 'assistant', + 'sender': current_agent.name, + 'tool_calls': [{ + 'function': { + 'name': event.item.raw_item.name, + 'arguments': event.item.raw_item.arguments + }, + 'id': event.item.raw_item.call_id, + 'type': 'function' + }], + 'tool_call_id': None, + 'tool_name': None, + 'response_type': ResponseType.INTERNAL.value + } + print('-'*100) + print(f"Yielding message: {message}") + print('-'*100) + yield ('message', message) + message['content'] = 
f"Sender agent: {current_agent.name}\nContent: {message['content']}" + accumulated_messages.append(message) + + # Execute the tool call + try: + tool_name = event.item.raw_item.name + tool_args = json.loads(event.item.raw_item.arguments) + tool_call_id = event.item.raw_item.call_id + + # Find the tool in external_tools + tool = next((t for t in external_tools if t.name == tool_name), None) + if tool: + print(f"Executing tool call: {tool_name} with args: {tool_args}") + # Add MCP servers to tool args if this is an MCP tool + if hasattr(tool, 'is_mcp') and tool.is_mcp: + mcp_servers = complete_request.get("mcpServers", []) + if not mcp_servers: + raise ValueError("No MCP servers configured in complete_request") + tool_args["mcp_servers"] = mcp_servers + tool_result = await tool.execute(**tool_args) + + # Handle MCP tool response + if tool_config.get("isMcp", False): + mcp_server_name = tool_config.get("mcpServerName", "mine") + mcp_server = next((s for s in mcp_servers if s["name"] == mcp_server_name), None) + if not mcp_server: + raise ValueError(f"MCP server {mcp_server_name} not found") + + mcp_url = mcp_server["url"] + logger.debug(f"MCP tool called for: {tool_name}") + logger.debug(f"MCP server URL: {mcp_url}") + + # Call MCP tool with correct parameters + tool_result = await call_mcp(tool_name, json.dumps(tool_args), mcp_url) + logger.debug(f"MCP tool response: {tool_result}") + + # Format the tool response - ensure it's a string + if isinstance(tool_result, dict): + tool_result = json.dumps(tool_result) + elif not isinstance(tool_result, str): + tool_result = str(tool_result) + + # Create tool response message + tool_response = { + "content": tool_result, + "role": "tool", + "name": tool_name, + "sender": None, + "tool_calls": None, + "tool_call_id": tool_call_id, + "tool_name": tool_name, + "response_type": "internal" + } + + # Add tool response to accumulated messages + accumulated_messages.append(tool_response) + yield tool_response + + # Also yield a tool 
call output message + tool_call_output = { + "content": json.dumps({ + "role": "tool", + "name": tool_name, + "content": tool_result + }), + "role": "tool", + "name": "recommendation", + "sender": None, + "tool_calls": None, + "tool_call_id": tool_call_id, + "tool_name": "recommendation", + "response_type": "internal" + } + yield tool_call_output + else: + print(f"Tool not found: {tool_name}") + except Exception as e: + print(f"Error executing tool call: {str(e)}") + print(f"Traceback: {traceback.format_exc()}") + # Create error response + error_response = { + 'content': json.dumps({"error": str(e)}), + 'role': 'tool', # Changed from 'function' to 'tool' + 'name': tool_name, + 'sender': None, + 'tool_calls': None, + 'tool_call_id': tool_call_id, + 'tool_name': tool_name, + 'response_type': ResponseType.INTERNAL.value + } + yield ('message', error_response) + accumulated_messages.append(error_response) + + elif event.item.type == "tool_call_output_item": + # Get the tool name and call id from raw_item + tool_call_id = None + tool_name = None + + # Try to get call_id from various possible locations + if hasattr(event.item.raw_item, 'call_id'): + tool_call_id = event.item.raw_item.call_id + elif isinstance(event.item.raw_item, dict) and 'call_id' in event.item.raw_item: + tool_call_id = event.item.raw_item['call_id'] + + # Try to get tool name from various possible locations + if hasattr(event.item.raw_item, 'name'): + tool_name = event.item.raw_item.name + elif isinstance(event.item.raw_item, dict): + if 'name' in event.item.raw_item: + tool_name = event.item.raw_item['name'] + elif 'type' in event.item.raw_item and event.item.raw_item['type'] == 'function_call_output': + # For function call outputs, try to infer from context + tool_name = 'recommendation' # Default for function calls + + # Fallback to event item if available + if not tool_name and hasattr(event.item, 'tool_name'): + tool_name = event.item.tool_name + if not tool_call_id and hasattr(event.item, 
'tool_call_id'): + tool_call_id = event.item.tool_call_id + + # Handle tool call output + message = { + 'content': str(event.item.output), + 'role': 'tool', + 'name': tool_name, + 'sender': None, + 'tool_calls': None, + 'tool_call_id': tool_call_id, + 'tool_name': tool_name, + 'response_type': ResponseType.INTERNAL.value + } + print('-'*100) + print(f"Yielding tool call output message: {message}") + print('-'*100) + yield ('message', message) + accumulated_messages.append(message) + + elif event.item.type == "message_output_item": + # Extract content and citations + content = "" + url_citations = [] + if hasattr(event.item.raw_item, 'content'): + for content_item in event.item.raw_item.content: + if hasattr(content_item, 'text'): + content += content_item.text + if hasattr(content_item, 'annotations'): + for annotation in content_item.annotations: + if hasattr(annotation, 'type') and annotation.type == 'url_citation': + citation = { + 'url': annotation.url if hasattr(annotation, 'url') else '', + 'title': annotation.title if hasattr(annotation, 'title') else '', + 'start_index': annotation.start_index if hasattr(annotation, 'start_index') else 0, + 'end_index': annotation.end_index if hasattr(annotation, 'end_index') else 0 + } + url_citations.append(citation) + + # Determine message type and create message + is_internal = check_internal_visibility(current_agent) + response_type = ResponseType.INTERNAL.value if is_internal else ResponseType.EXTERNAL.value + + message = { + 'content': content, + 'role': 'assistant', + 'sender': current_agent.name, + 'tool_calls': None, + 'tool_call_id': None, + 'tool_name': None, + 'response_type': response_type + } + + if url_citations: + message['citations'] = url_citations + + # Track that this agent has responded + if not message.get('tool_calls'): # If there are no tool calls, it's a content response + agent_message_counts[current_agent.name] = 1 + print('-'*100) + print(f"Yielding message: {message}") + print('-'*100) + yield 
('message', message) + message['content'] = f"Sender agent: {current_agent.name}\nContent: {message['content']}" + accumulated_messages.append(message) + # Return to parent or end turn + if is_internal and parent_stack: + # Create tool call for control transition + tool_call_id = str(uuid.uuid4()) + transition_message = { + 'content': None, + 'role': 'assistant', + 'sender': current_agent.name, + 'tool_calls': [{ + 'function': { + 'name': 'transfer_to_agent', + 'arguments': json.dumps({ + 'assistant': parent_stack[-1].name + }) + }, + 'id': tool_call_id, + 'type': 'function' + }], + 'tool_call_id': None, + 'tool_name': None, + 'response_type': ResponseType.INTERNAL.value + } + print('-'*100) + print(f"Yielding control transition message: {transition_message}") + print('-'*100) + yield ('message', transition_message) + + # Create tool response for control transition + transition_response = { + 'content': json.dumps({ + 'assistant': parent_stack[-1].name + }), + 'role': 'tool', # Changed from 'function' to 'tool' + 'name': 'transfer_to_agent', + 'sender': None, + 'tool_calls': None, + 'tool_call_id': tool_call_id, + 'tool_name': 'transfer_to_agent' + } + print('-'*100) + print(f"Yielding control transition response: {transition_response}") + print('-'*100) + yield ('message', transition_response) + + current_agent = parent_stack.pop() + continue + elif not is_internal: + break + + except Exception as e: + print("\n=== Error in stream event processing ===") + print(f"Error: {str(e)}") + print("Event details:") + print(f"Event type: {event.type if hasattr(event, 'type') else 'unknown'}") + if hasattr(event, '__dict__'): + print(f"Event attributes: {event.__dict__}") + print(f"Full event object: {event}") + print(f"Traceback: {traceback.format_exc()}") + print("=" * 50) + raise + + # Break main loop if we've output an external message + if not is_internal_agent and current_agent.name in agent_message_counts: + break + + # Set final state + final_state = { + 
"last_agent_name": current_agent.name if current_agent else None, + "tokens": tokens_used, + "turn_messages": accumulated_messages + } + print('-'*100) + print(f"Yielding done: {final_state}") + print('-'*100) + yield ('done', {'state': final_state}) + + except Exception as e: + print(traceback.format_exc()) + print(f"Error in stream processing: {str(e)}") + yield ('error', {'error': str(e), 'state': final_state}) \ No newline at end of file diff --git a/rowboat_agents/src/graph/execute_turn.py b/rowboat_agents/src/graph/execute_turn.py new file mode 100644 index 000000000..ad5ae9b6e --- /dev/null +++ b/rowboat_agents/src/graph/execute_turn.py @@ -0,0 +1,730 @@ +import logging +import json +import aiohttp +import jwt +import hashlib +from agents import OpenAIChatCompletionsModel, trace, add_trace_processor + +# Import helper functions needed for get_agents +from .helpers.access import ( + get_tool_config_by_name, + get_tool_config_by_type +) +from .helpers.instructions import ( + add_rag_instructions_to_agent +) +from .types import outputVisibility +from agents import Agent as NewAgent, Runner, FunctionTool, RunContextWrapper, ModelSettings, WebSearchTool +from .tracing import AgentTurnTraceProcessor +# Add import for OpenAI functionality +from src.utils.common import generate_openai_output +from typing import Any +import asyncio +from mcp import ClientSession # Change back to original import +from mcp.client.sse import sse_client + +from pydantic import BaseModel +from typing import List, Optional, Dict +from .tool_calling import call_rag_tool +from pymongo import MongoClient +import os +MONGO_URI = os.environ.get("MONGODB_URI", "mongodb://localhost:27017/rowboat").strip() +mongo_client = MongoClient(MONGO_URI) +db = mongo_client["rowboat"] + +from src.utils.client import client as openai_client, PROVIDER_DEFAULT_MODEL + +class NewResponse(BaseModel): + messages: List[Dict] + agent: Optional[Any] = None + tokens_used: Optional[dict] = {} + error_msg: Optional[str] 
= "" + +# Add at the top of the file after imports +_request_cache = {} +_tool_call_in_progress = set() +_tool_execution_locks = {} # Dictionary to store locks per tool call + +async def get_tool_lock(call_key: str) -> asyncio.Lock: + """Get or create a lock for a specific tool call.""" + if call_key not in _tool_execution_locks: + _tool_execution_locks[call_key] = asyncio.Lock() + return _tool_execution_locks[call_key] + +async def mock_tool(tool_name: str, args: str, description: str, mock_instructions: str) -> str: + try: + print(f"Mock tool called for: {tool_name}") + + messages = [ + {"role": "system", "content": f"You are simulating the execution of a tool called '{tool_name}'.Here is the description of the tool: {description}. Here are the instructions for the mock tool: {mock_instructions}. Generate a realistic response as if the tool was actually executed with the given parameters."}, + {"role": "user", "content": f"Generate a realistic response for the tool '{tool_name}' with these parameters: {args}. 
The response should be concise and focused on what the tool would actually return."} + ] + + print(f"Generating simulated response for tool: {tool_name}") + response_content = None + response_content = generate_openai_output(messages, output_type='text', model=PROVIDER_DEFAULT_MODEL) + return response_content + except Exception as e: + print(f"Error in mock_tool: {str(e)}") + return f"Error: {str(e)}" + +async def call_webhook(tool_name: str, args: str, webhook_url: str, signing_secret: str) -> str: + try: + print(f"Calling webhook for tool: {tool_name}") + content_dict = { + "toolCall": { + "function": { + "name": tool_name, + "arguments": args + } + } + } + request_body = { + "content": json.dumps(content_dict) + } + + # Prepare headers + headers = {} + if signing_secret: + content_str = request_body["content"] + body_hash = hashlib.sha256(content_str.encode('utf-8')).hexdigest() + payload = {"bodyHash": body_hash} + signature_jwt = jwt.encode(payload, signing_secret, algorithm="HS256") + headers["X-Signature-Jwt"] = signature_jwt + + async with aiohttp.ClientSession() as session: + async with session.post(webhook_url, json=request_body, headers=headers) as response: + if response.status == 200: + response_json = await response.json() + return response_json.get("result", "") + else: + error_msg = await response.text() + print(f"Webhook error: {error_msg}") + return f"Error: {error_msg}" + except Exception as e: + print(f"Exception in call_webhook: {str(e)}") + return f"Error: Failed to call webhook - {str(e)}" + +async def call_mcp(tool_name: str, args: str, mcp_server_url: str) -> str: + try: + print(f"šŸ”„ MCP tool called for: {tool_name}") + mcp_server_url = mcp_server_url.strip() + print(f"🌐 MCP server URL: {mcp_server_url}") + + # Parse and clean arguments + try: + jargs = json.loads(args) + # Remove any mcp_servers from args if present + if 'mcp_servers' in jargs: + del jargs['mcp_servers'] + + # Normalize all string arguments to lowercase + normalized_args 
= {} + for key, value in jargs.items(): + if isinstance(value, str): + normalized_args[key] = value.lower().strip() + else: + normalized_args[key] = value + + print(f"šŸ“¤ Making MCP call with normalized args: {normalized_args}") + except json.JSONDecodeError as e: + print(f"āŒ Error parsing arguments: {str(e)}") + raise ValueError(f"Invalid JSON arguments: {str(e)}") + + # Create a unique key for this MCP call + call_key = f"{tool_name}:{json.dumps(normalized_args, sort_keys=True)}" + print(f"šŸ”‘ MCP call key: {call_key}") + + # Get the lock for this specific tool call + lock = await get_tool_lock(call_key) + + # Use the lock to prevent concurrent execution + async with lock: + # Check if we've already made this request + if call_key in _request_cache: + print(f"šŸ’¾ Returning cached MCP response for {call_key}") + return _request_cache[call_key] + + # Check if this call is already in progress + if call_key in _tool_call_in_progress: + print(f"āš ļø MCP call {call_key} already in progress") + cached_response = _request_cache.get(call_key) + if cached_response: + return cached_response + return json.dumps({ + "role": "tool", + "name": tool_name, + "content": [{"type": "text", "text": "MCP call in progress...", "annotations": None}] + }) + + # Mark this call as in progress + _tool_call_in_progress.add(call_key) + print(f"šŸ”’ Locked MCP call: {call_key}") + + try: + # Create SSE client with a single connection + try: + async with sse_client(url=mcp_server_url, timeout=60) as streams: + print(f"šŸ“” SSE streams received: {streams} (type: {type(streams)})") + + if not streams: + raise ValueError("No SSE streams received") + + # Ensure we have exactly two streams + if not isinstance(streams, (list, tuple)): + raise ValueError(f"Expected streams to be a list or tuple, got {type(streams)}") + + if len(streams) != 2: + raise ValueError(f"Expected exactly 2 SSE streams, got {len(streams)}") + + stream0, stream1 = streams[0], streams[1] + print(f"šŸ“” Successfully 
unpacked streams: {type(stream0)}, {type(stream1)}") + + # Create session and make the tool call + async with ClientSession(stream0, stream1) as session: + await session.initialize() + print(f"šŸ“” Making tool call to {tool_name} with normalized args") + response = await session.call_tool(tool_name, arguments=normalized_args) + + if not response: + raise ValueError("No response received from MCP server") + + # Handle the response content + if hasattr(response, 'content'): + content = response.content + if isinstance(content, list): + # Convert list items to text + content_list = [] + for item in content: + if hasattr(item, 'text'): + content_list.append(item.text) + else: + content_list.append(str(item)) + result = json.dumps({ + "role": "tool", + "name": tool_name, + "content": [{"type": "text", "text": "\n".join(content_list), "annotations": None}] + }) + else: + # Handle single item response + text = content.text if hasattr(content, 'text') else str(content) + result = json.dumps({ + "role": "tool", + "name": tool_name, + "content": [{"type": "text", "text": text, "annotations": None}] + }) + print(f"šŸ“„ MCP response processed successfully") + + # Cache the response + _request_cache[call_key] = result + print(f"šŸ’¾ Cached MCP response for {call_key}") + + return result + else: + raise ValueError("Response has no content attribute") + except Exception as e: + print(f"āŒ Error in SSE client: {str(e)}") + raise + finally: + # Remove the in-progress flag + _tool_call_in_progress.remove(call_key) + print(f"šŸ”“ Unlocked MCP call: {call_key}") + + except Exception as e: + print(f"āŒ Error in call_mcp: {str(e)}") + return json.dumps({ + "role": "tool", + "name": tool_name, + "content": [{"type": "text", "text": f"Error: {str(e)}", "annotations": None}] + }) + +async def catch_all(ctx: RunContextWrapper[Any], args: str, tool_name: str, tool_config: dict, complete_request: dict) -> str: + try: + print("\n" + "="*50) + print(f"TOOL CALL STARTED:") + print(f"Tool: 
{tool_name}") + print(f"Arguments: {args}") + print(f"Tool Config: {json.dumps(tool_config, indent=2)}") + print("="*50 + "\n") + + # Normalize arguments for consistent caching + try: + args_dict = json.loads(args) + # Convert all string values to lowercase for case-insensitive comparison + normalized_args = {k: v.lower() if isinstance(v, str) else v for k, v in args_dict.items()} + normalized_args_str = json.dumps(normalized_args, sort_keys=True) + except json.JSONDecodeError: + normalized_args_str = args + + # Create a unique key for this tool call using normalized arguments + call_key = f"{tool_name}:{normalized_args_str}" + print(f"šŸ”‘ Normalized cache key: {call_key}") + + # Get the lock for this specific tool call + lock = await get_tool_lock(call_key) + + # Use the lock to prevent concurrent execution + async with lock: + # Check if this tool call is already in progress + if call_key in _tool_call_in_progress: + print(f"āš ļø Tool call {call_key} already in progress, returning cached response") + cached_response = _request_cache.get(call_key) + if cached_response: + print(f"šŸ’¾ Found cached response for {call_key}") + return cached_response + print(f"āš ļø No cached response found for {call_key}, returning in-progress message") + return json.dumps({ + "role": "tool", + "name": tool_name, + "content": [{"type": "text", "text": "Tool call in progress...", "annotations": None}] + }) + + # Check if we already have a cached response + if call_key in _request_cache: + print(f"šŸ’¾ Returning cached response for {call_key}") + return _request_cache[call_key] + + # Mark this tool call as in progress + _tool_call_in_progress.add(call_key) + print(f"šŸ”’ Locked tool call: {call_key}") + + try: + # Create event loop for async operations + try: + loop = asyncio.get_event_loop() + except RuntimeError: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + if tool_config.get("mockTool", False) or complete_request.get("testProfile", {}).get("mockTools", 
False): + print(f"šŸ¤– Using mock tool for: {tool_name}") + # Call mock_tool to handle the response + if complete_request.get("testProfile", {}).get("mockPrompt", ""): + response_content = await mock_tool(tool_name, args, tool_config.get("description", ""), complete_request.get("testProfile", {}).get("mockPrompt", "")) + else: + response_content = await mock_tool(tool_name, args, tool_config.get("description", ""), tool_config.get("mockInstructions", "")) + print(f"šŸ“ Mock tool response: {response_content}") + response = json.dumps({ + "role": "tool", + "name": tool_name, + "content": [{"type": "text", "text": response_content, "annotations": None}] + }) + elif tool_config.get("isMcp", False): + print(f"šŸ”„ Using MCP tool: {tool_name}") + mcp_server_name = tool_config.get("mcpServerName", "") + mcp_servers = complete_request.get("mcpServers", {}) + mcp_server_url = next((server.get("url", "") for server in mcp_servers if server.get("name") == mcp_server_name), "") + print(f"🌐 MCP Server URL: {mcp_server_url}") + + # Make the MCP call and cache the response + try: + print(f"šŸ“” Making MCP call to {tool_name}") + response = await call_mcp(tool_name, normalized_args_str, mcp_server_url) + print(f"šŸ“„ MCP response received: {response[:200]}...") # Print first 200 chars + except Exception as e: + print(f"āŒ Error in MCP call: {str(e)}") + response = json.dumps({ + "role": "tool", + "name": tool_name, + "content": [{"type": "text", "text": f"Error: {str(e)}", "annotations": None}] + }) + else: + print(f"🌐 Using webhook for tool: {tool_name}") + collection = db["projects"] + doc = collection.find_one({"_id": complete_request.get("projectId", "")}) + signing_secret = doc.get("secret", "") + webhook_url = complete_request.get("toolWebhookUrl", "") + print(f"šŸ”— Webhook URL: {webhook_url}") + response_content = await call_webhook(tool_name, args, webhook_url, signing_secret) + print(f"šŸ“„ Webhook response: {response_content[:200]}...") # Print first 200 chars + 
def get_rag_tool(config: dict, complete_request: dict) -> FunctionTool:
    """
    Build the rag_search FunctionTool for an agent, based on its configuration.

    Returns a FunctionTool wired to call_rag_tool when the config lists RAG
    data sources, otherwise None.
    """
    project_id = complete_request.get("projectId", "")

    # Guard clause: no data sources configured means no RAG tool.
    if not config.get("ragDataSources", None):
        return None

    print(f"Creating rag_search tool with params:\n-Data Sources: {config.get('ragDataSources', [])}\n-Return Type: {config.get('ragReturnType', 'chunks')}\n-K: {config.get('ragK', 3)}")

    # JSON schema for the tool's single required "query" argument.
    query_schema = {
        "type": "object",
        "properties": {
            "query": {
                "type": "string",
                "description": "The query to search for"
            }
        },
        "additionalProperties": False,
        "required": [
            "query"
        ]
    }

    # NOTE: the lambda re-reads config at invocation time (late binding),
    # matching the original behavior.
    return FunctionTool(
        name="rag_search",
        description="Get information about an article",
        params_json_schema=query_schema,
        on_invoke_tool=lambda ctx, args: call_rag_tool(project_id, json.loads(args)['query'], config.get("ragDataSources", []), config.get("ragReturnType", "chunks"), config.get("ragK", 3))
    )

# Fallback cap on how often a parent agent may invoke the same child per turn.
DEFAULT_MAX_CALLS_PER_PARENT_AGENT = 3
+ """ + if not isinstance(agent_configs, list): + raise ValueError("Agents config is not a list in get_agents") + if not isinstance(tool_configs, list): + raise ValueError("Tools config is not a list in get_agents") + + new_agents = [] + new_agent_to_children = {} + new_agent_name_to_index = {} + + # Create a dictionary to store tool handlers + tool_handlers = {} + + # Create Agent objects from config + for agent_config in agent_configs: + print("="*100) + print(f"Processing config for agent: {agent_config['name']}") + + # If hasRagSources, append the RAG tool to the agent's tools + if agent_config.get("hasRagSources", False): + rag_tool_name = get_tool_config_by_type(tool_configs, "rag").get("name", "") + agent_config["tools"].append(rag_tool_name) + agent_config = add_rag_instructions_to_agent(agent_config, rag_tool_name) + + # Prepare tool lists for this agent + external_tools = [] + + print(f"Agent {agent_config['name']} has {len(agent_config['tools'])} configured tools") + + new_tools = [] + + for tool_name in agent_config["tools"]: + tool_config = get_tool_config_by_name(tool_configs, tool_name) + + if tool_config: + external_tools.append({ + "type": "function", + "function": tool_config + }) + if tool_name == "web_search": + tool = WebSearchTool() + elif tool_name == "rag_search": + tool = get_rag_tool(agent_config, complete_request) + else: + # Create or reuse tool handler + if tool_name not in tool_handlers: + def create_tool_handler(_tool_name, _tool_config, _complete_request): + async def handler(ctx, args): + try: + # Parse and normalize arguments + try: + args_dict = json.loads(args) + normalized_args = {k: v.lower() if isinstance(v, str) else v for k, v in args_dict.items()} + normalized_args_str = json.dumps(normalized_args, sort_keys=True) + except json.JSONDecodeError: + normalized_args_str = args + + # Create a unique key for this tool call + call_key = f"{_tool_name}:{normalized_args_str}" + print(f"šŸ”‘ Tool handler cache key: {call_key}") + + # 
Check if we've already made this request + if call_key in _request_cache: + print(f"šŸ’¾ Tool handler returning cached response for {call_key}") + return _request_cache[call_key] + + # Check if this tool call is already in progress + if call_key in _tool_call_in_progress: + print(f"āš ļø Tool handler found in-progress call for {call_key}") + cached_response = _request_cache.get(call_key) + if cached_response: + return cached_response + return json.dumps({ + "role": "tool", + "name": _tool_name, + "content": [{"type": "text", "text": "Tool call in progress...", "annotations": None}] + }) + + # Mark this tool call as in progress + _tool_call_in_progress.add(call_key) + print(f"šŸ”’ Tool handler locked call: {call_key}") + + try: + # Make the call and cache the response + response = await catch_all(ctx, normalized_args_str, _tool_name, _tool_config, _complete_request) + _request_cache[call_key] = response + return response + finally: + # Remove the in-progress flag + _tool_call_in_progress.remove(call_key) + print(f"šŸ”“ Tool handler unlocked call: {call_key}") + except Exception as e: + print(f"āŒ Error in tool handler: {str(e)}") + return json.dumps({ + "role": "tool", + "name": _tool_name, + "content": [{"type": "text", "text": f"Error: {str(e)}", "annotations": None}] + }) + return handler + + tool_handlers[tool_name] = create_tool_handler(tool_name, tool_config, complete_request) + + tool = FunctionTool( + name=tool_name, + description=tool_config["description"], + params_json_schema=tool_config["parameters"], + strict_json_schema=False, + on_invoke_tool=tool_handlers[tool_name] + ) + if tool: + new_tools.append(tool) + print(f"Added tool {tool_name} to agent {agent_config['name']}") + else: + print(f"WARNING: Tool {tool_name} not found in tool_configs") + + # Create the agent object + print(f"Creating Agent object for {agent_config['name']}") + + # add the name and description to the agent instructions + agent_instructions = f"## Your 
Name\n{agent_config['name']}\n\n## Description\n{agent_config['description']}\n\n## Instructions\n{agent_config['instructions']}" + + # Add tool-specific instructions dynamically + tool_instructions = [] + for tool in new_tools: + tool_config = get_tool_config_by_name(tool_configs, tool.name) + if tool_config: + # Generate tool-specific instructions based on the tool's configuration + tool_instructions.append(f""" +## {tool.name.title()} Instructions +- ALWAYS use the {tool.name} tool for any {tool_config['description'].lower()} +- NEVER provide information or perform operations manually that should be done by the {tool.name} tool +- NEVER make assumptions or provide information from your training data about topics that should be handled by the {tool.name} tool +- When using the {tool.name} tool, provide the required parameters as specified in the tool's configuration +- If you don't have all required parameters, ask the user for them before using the tool +- If the tool returns an error or no results, inform the user and ask for clarification or alternative information +""") + + if tool_instructions: + agent_instructions += "\n\n" + "\n\n".join(tool_instructions) + + try: + # Identify the model + model_name = agent_config["model"] if agent_config["model"] else PROVIDER_DEFAULT_MODEL + print(f"Using model: {model_name}") + model=OpenAIChatCompletionsModel(model=model_name, openai_client=openai_client) if openai_client else agent_config["model"] + + # Create the agent object + new_agent = NewAgent( + name=agent_config["name"], + instructions=agent_instructions, + handoff_description=agent_config["description"], + tools=new_tools, + model=model, + model_settings=ModelSettings( + temperature=0.0 + ) + ) + + # Set the max calls per parent agent + new_agent.max_calls_per_parent_agent = agent_config.get("maxCallsPerParentAgent", DEFAULT_MAX_CALLS_PER_PARENT_AGENT) + if not agent_config.get("maxCallsPerParentAgent", None): + print(f"WARNING: Max calls per parent agent not 
received for agent {new_agent.name}. Using rowboat_agents default of {DEFAULT_MAX_CALLS_PER_PARENT_AGENT}") + else: + print(f"Max calls per parent agent for agent {new_agent.name}: {new_agent.max_calls_per_parent_agent}") + + # Set output visibility + new_agent.output_visibility = agent_config.get("outputVisibility", outputVisibility.EXTERNAL.value) + if not agent_config.get("outputVisibility", None): + print(f"WARNING: Output visibility not received for agent {new_agent.name}. Using rowboat_agents default of {new_agent.output_visibility}") + else: + print(f"Output visibility for agent {new_agent.name}: {new_agent.output_visibility}") + + # Handle the connected agents + new_agent_to_children[agent_config["name"]] = agent_config.get("connectedAgents", []) + new_agent_name_to_index[agent_config["name"]] = len(new_agents) + new_agents.append(new_agent) + print(f"Successfully created agent: {agent_config['name']}") + except Exception as e: + print(f"ERROR: Failed to create agent {agent_config['name']}: {str(e)}") + raise + + for new_agent in new_agents: + # Initialize the handoffs attribute if it doesn't exist + if not hasattr(new_agent, 'handoffs'): + new_agent.handoffs = [] + # Look up the agent's children from the old agent and create a list called handoffs in new_agent with pointers to the children in new_agents + new_agent.handoffs = [new_agents[new_agent_name_to_index[child]] for child in new_agent_to_children[new_agent.name]] + + print("Returning created agents") + print("="*100) + return new_agents + +# Initialize a flag to track if the trace processor is added +trace_processor_added = False + +async def run_streamed( + agent, + messages, + external_tools=None, + tokens_used=None, + enable_tracing=False +): + """ + Wrapper function for initializing and running the Swarm client in streaming mode. 
+ """ + print(f"Initializing streaming client for agent: {agent.name}") + + # Initialize default parameters + if external_tools is None: + external_tools = [] + if tokens_used is None: + tokens_used = {} + + # Format messages to ensure they're compatible with the OpenAI API + formatted_messages = [] + for msg in messages: + if isinstance(msg, dict) and "content" in msg: + formatted_msg = { + "role": msg.get("role", "user"), + "content": msg["content"] + } + # Convert function role to tool role for tool responses + if formatted_msg["role"] == "function": + formatted_msg["role"] = "tool" + formatted_messages.append(formatted_msg) + else: + formatted_messages.append({ + "role": "user", + "content": str(msg) + }) + + print("Beginning streaming run") + + try: + # Add our custom trace processor only if tracing is enabled + global trace_processor_added + if enable_tracing and not trace_processor_added: + trace_processor = AgentTurnTraceProcessor() + add_trace_processor(trace_processor) + trace_processor_added = True + + # Get the stream result without trace context first + stream_result = Runner.run_streamed(agent, formatted_messages) + + # Create a set to track processed tool calls + processed_tool_calls = set() + + # Wrap the stream_events to handle tool role conversion and prevent duplicates + original_stream_events = stream_result.stream_events + + async def wrapped_stream_events(): + try: + async for event in original_stream_events(): + # Handle tool calls + if isinstance(event, dict) and event.get("tool_calls"): + # Create a list to store unique tool calls + unique_tool_calls = [] + + for tool_call in event["tool_calls"]: + try: + # Parse and normalize the arguments + args = json.loads(tool_call['function']['arguments']) + normalized_args = {} + for key, value in args.items(): + if isinstance(value, str): + normalized_args[key] = value.lower().strip() + else: + normalized_args[key] = value + + # Create a unique key for this tool call + tool_call_key = 
f"{tool_call['function']['name']}:{json.dumps(normalized_args, sort_keys=True)}" + + # Skip if we've already processed this tool call + if tool_call_key in processed_tool_calls: + print(f"šŸ”„ Skipping duplicate tool call in stream: {tool_call_key}") + continue + + # Mark this tool call as processed + processed_tool_calls.add(tool_call_key) + + # Update the tool call with normalized arguments + tool_call['function']['arguments'] = json.dumps(normalized_args) + unique_tool_calls.append(tool_call) + print(f"āœ… Added unique tool call to stream: {tool_call_key}") + except json.JSONDecodeError as e: + print(f"āŒ Error parsing tool call arguments in stream: {str(e)}") + continue + + # Only yield the event if we have unique tool calls + if unique_tool_calls: + event["tool_calls"] = unique_tool_calls + yield event + else: + print("ā­ļø Skipping event with no unique tool calls") + continue + else: + # Convert function role to tool role for tool responses + if isinstance(event, dict) and event.get("role") == "function": + event["role"] = "tool" + yield event + except GeneratorExit: + # Handle generator exit gracefully + raise + except Exception as e: + print(f"āŒ Error in stream events: {str(e)}") + # Return a formatted error response + yield { + "role": "assistant", + "content": f"Error processing stream: {str(e)}", + "error": True + } + raise + + # Create a new stream result with our wrapped events + class WrappedStreamResult: + def __init__(self, original_result, wrapped_events): + self.original_result = original_result + self.stream_events = wrapped_events + # Copy any other attributes from the original result + for attr in dir(original_result): + if not attr.startswith('_') and not hasattr(self, attr): + setattr(self, attr, getattr(original_result, attr)) + + wrapped_result = WrappedStreamResult(stream_result, wrapped_stream_events) + return wrapped_result + except Exception as e: + print(f"āŒ Error during streaming run: {str(e)}") + raise \ No newline at end of 
def classify_hallucination(context: str, assistant_response: str, chat_history: list, model: str) -> str:
    """
    Checks if an assistant's response contains hallucinations by comparing against provided context.

    Args:
        context (str): The context/knowledge base to check the response against
        assistant_response (str): The response from the assistant to validate
        chat_history (list): List of previous chat messages for context
        model (str): Model name forwarded to generate_llm_output

    Returns:
        str: Verdict indicating level of hallucination:
            'yes-absolute' - completely supported by context
            'yes-common-sensical' - supported with common sense interpretation
            'no-absolute' - not supported by context
            'no-subtle' - not supported but difference is subtle
    """
    chat_history_str = "\n".join([f"{message['role']}: {message['content']}" for message in chat_history])

    # Fix: the original prompt said "Output of of the classes" — corrected to
    # "Output one of the classes" so the instruction to the model is coherent.
    prompt = f"""
    You are a guardrail agent. Your job is to check if the response is hallucinating.

    ------------------------------------------------------------------------
    Here is the context:
    {context}

    ------------------------------------------------------------------------
    Here is the chat history message:
    {chat_history_str}

    ------------------------------------------------------------------------
    Here is the response:
    {assistant_response}

    ------------------------------------------------------------------------
    As a hallucination guardrail, your job is to go through each line of the response and check if it is completely supported by the context. Even if a single line is not supported, the response is no.

    Output a single verdict for the entire response. don't provide any reasoning. The output classes are

    yes-absolute: completely supported by the context
    yes-common-sensical: but with some common sense interpretation
    no-absolute: not supported by the context
    no-subtle: not supported by the context but the difference is subtle

    Output one of the classes:
    verdict : yes-absolute/yes-common-sensical/no-absolute/no-subtle

    Example 1: The response is completely supported by the context.
    User Input:
    Context: "Our airline provides complimentary meals and beverages on all international flights. Passengers are allowed one carry-on bag and one personal item."
    Chat History:
    User: "Do international flights with your airline offer free meals?"
    Response: "Yes, all international flights with our airline offer free meals and beverages."
    Output: verdict: yes-absolute

    Example 2: The response is generally true and could be deduced with common sense interpretation, though not explicitly stated in the context.
    User Input:
    Context: "Flights may experience delays due to weather conditions. In such cases, the airline staff will provide updates at the airport."
    Chat History:
    User: "Will there be announcements if my flight is delayed?"
    Response: "Yes, if your flight is delayed, there will be announcements at the airport."
    Output: verdict: yes-common-sensical

    Example 3: The response is not supported by the context and contains glaring inaccuracies.
    User Input:
    Context: "You can cancel your ticket online up to 24 hours before the flight's departure time and receive a full refund."
    Chat History:
    User: "Can I get a refund if I cancel 12 hours before the flight?"
    Response: "Yes, you can get a refund if you cancel 12 hours before the flight."
    Output: verdict: no-absolute

    Example 4: The response is not supported by the context but the difference is subtle.
    User Input:
    Context: "Our frequent flyer program offers discounts on checked bags for members who have achieved Gold status."
    Chat History:
    User: "As a member, do I get discounts on checked bags?"
    Response: "Yes, members of our frequent flyer program get discounts on checked bags."
    Output: verdict: no-subtle
    """
    messages = [
        {
            "role": "system",
            "content": prompt,
        },
    ]
    response = generate_llm_output(messages, model)
    return response
def post_process_response(messages: list, post_processing_agent_name: str, post_process_instructions: str, style_prompt: str = None, context: str = None, model: str = "gpt-4o", tokens_used: dict = None, last_agent: Agent = None) -> dict:
    """
    Rewrite the pending (last) message per the configured post-processing
    instructions, optional style prompt and context, and mark it external.

    Args:
        messages: full chat history; only the last message is rewritten.
        post_processing_agent_name: appended to the message's sender chain.
        post_process_instructions: admin-configured rewriting instructions.
        style_prompt: optional style guidance added to the LLM prompt.
        context: optional grounding context added to the LLM prompt.
        model: OpenAI model used for the rewrite.
        tokens_used: running token-usage accumulator (a fresh dict when None).
        last_agent: agent that produced the pending message; its instructions
            and history are embedded in the prompt.

    Returns:
        Response carrying the rewritten message, or an empty message list when
        post processing is skipped.
    """
    # Fix: the default was a shared mutable dict ({}), which accumulates state
    # across calls; a None sentinel preserves the old behavior safely.
    if tokens_used is None:
        tokens_used = {}

    agent_instructions = last_agent.instructions
    agent_history = last_agent.history

    # Work on a copy so the caller's message list is never mutated.
    pending_msg = copy.deepcopy(messages[-1])
    logger.debug(f"Pending message keys: {pending_msg.keys()}")

    # Post processing only applies to internal messages that carry content and
    # only when instructions are configured.
    skip = False

    if pending_msg.get("tool_calls"):
        logger.info("Last message is a tool call, skipping post processing and setting last message to external")
        skip = True

    elif pending_msg['response_type'] != "internal":
        logger.info("Last message is not internal, skipping post processing and setting last message to external")
        skip = True

    elif not pending_msg['content']:
        logger.info("Last message has no content, skipping post processing and setting last message to external")
        skip = True

    elif not post_process_instructions:
        logger.info("No post process instructions, skipping post processing and setting last message to external")
        skip = True

    if skip:
        # NOTE(review): this flips the flag on the local copy only, and the
        # returned Response carries no messages — preserved as-is; confirm the
        # caller re-marks the original message as external.
        pending_msg['response_type'] = "external"
        response = Response(
            messages=[],
            tokens_used=tokens_used,
            agent=last_agent,
            error_msg=''
        )
        return response

    agent_history_str = f"\n{'*'*100}\n".join([f"Role: {message['role']} | Content: {message.get('content', 'None')} | Tool Calls: {message.get('tool_calls', 'None')}" for message in agent_history[:-1]])
    logger.debug(f"Agent history: {agent_history_str}")

    prompt = f"""
    # ROLE

    You are a post processing agent responsible for rewriting a response generated by an agent, according to instructions provided below. Ensure that the response you produce adheres to the instructions provided to you (if any).
    ------------------------------------------------------------------------

    # ADDITIONAL INSTRUCTIONS

    Here are additional instructions that the admin might have configured for you:
    {post_process_instructions}

    ------------------------------------------------------------------------

    # CHAT HISTORY

    Here is the chat history:
    {agent_history_str}
    """
    if context:
        context_prompt = f"""
    ------------------------------------------------------------------------
    # CONTEXT

    Here is the context:
    {context}
    """
        prompt += context_prompt

    if style_prompt:
        style_prompt = f"""
    ------------------------------------------------------------------------
    # STYLE PROMPT

    Here is the style prompt:
    {style_prompt}
    """
        prompt += style_prompt

    agent_response_and_instructions = f"""

    ------------------------------------------------------------------------
    # AGENT INSTRUCTIONS

    Here are the instructions to the agent generating the response:
    {agent_instructions}

    ------------------------------------------------------------------------
    # AGENT RESPONSE

    Here is the response that the agent has generated:
    {pending_msg['content']}

    """
    prompt += agent_response_and_instructions

    logger.debug(f"Sanitizing response for style. Original response: {pending_msg['content']}")
    completion = generate_openai_output(
        messages=[
            {"role": "system", "content": prompt}
        ],
        model = model,
        return_completion=True
    )
    content = completion.choices[0].message.content
    if content:
        # Fix: .strip().lstrip().rstrip() was redundant — strip() already
        # trims both ends.
        content = content.strip()
        tokens_used = update_tokens_used(provider="openai", model=model, tokens_used=tokens_used, completion=completion)
        logger.debug(f"Response after style check: {content}, tokens used: {tokens_used}")

    # Fall back to the original content when the model returned nothing.
    pending_msg['content'] = content if content else pending_msg['content']
    pending_msg['response_type'] = "external"
    pending_msg['sender'] = pending_msg['sender'] + f' >> {post_processing_agent_name}'

    response = Response(
        messages=[pending_msg],
        tokens_used=tokens_used,
        agent=last_agent,
        error_msg=''
    )

    return response
def get_external_tools(tool_configs):
    """Return the list of tool names declared in tool_configs."""
    logger.debug("Getting external tools")
    tools = []
    for entry in tool_configs:
        tools.append(entry["name"])
    logger.debug(f"Found {len(tools)} external tools")
    return tools

def get_agent_by_name(agent_name, agents):
    """Return the first agent whose .name matches; raise ValueError if none."""
    agent = None
    for candidate in agents:
        if getattr(candidate, "name", None) == agent_name:
            agent = candidate
            break
    if not agent:
        logger.error(f"Agent with name {agent_name} not found")
        raise ValueError(f"Agent with name {agent_name} not found")
    return agent

def get_agent_config_by_name(agent_name, agent_configs):
    """Return the first config dict whose 'name' matches; raise ValueError if none."""
    agent_config = None
    for cfg in agent_configs:
        if cfg.get("name") == agent_name:
            agent_config = cfg
            break
    if not agent_config:
        logger.error(f"Agent config with name {agent_name} not found")
        raise ValueError(f"Agent config with name {agent_name} not found")
    return agent_config

def pop_agent_config_by_type(agent_configs, agent_type):
    """Remove and return the first config of the given type (None if absent), plus the list."""
    match = None
    for cfg in agent_configs:
        if cfg.get("type") == agent_type:
            match = cfg
            break
    if match:
        agent_configs.remove(match)
    return match, agent_configs

def get_agent_by_type(agents, agent_type):
    """Return the first agent whose .type matches, or None."""
    for candidate in agents:
        if candidate.type == agent_type:
            return candidate
    return None

def get_prompt_by_type(prompt_configs, prompt_type):
    """Return the 'prompt' of the first config of the given type, or None."""
    for cfg in prompt_configs:
        if cfg.get("type") == prompt_type:
            return cfg.get("prompt")
    return None

def get_agent_data_by_name(agent_name, agent_data):
    """Return the first agent-data dict whose 'name' matches, or None."""
    return next((entry for entry in agent_data if entry.get("name", "") == agent_name), None)

def get_tool_config_by_name(tool_configs, tool_name):
    """Return the first tool config whose 'name' matches, or None."""
    for tc in tool_configs:
        if tc.get("name", "") == tool_name:
            return tc
    return None

def get_tool_config_by_type(tool_configs, tool_type):
    """Return the first tool config whose 'type' matches, or None."""
    for tc in tool_configs:
        if tc.get("type", "") == tool_type:
            return tc
    return None
def get_latest_assistant_msg(messages):
    """Return the most recent message with role 'assistant', or None."""
    for i in range(len(messages)-1, -1, -1):
        if messages[i].get("role") == "assistant":
            return messages[i]
    return None

def get_latest_non_assistant_messages(messages):
    """Return all messages after the last assistant message (whole list if none)."""
    for i in range(len(messages)-1, -1, -1):
        if messages[i].get("role") == "assistant":
            return messages[i+1:]
    return messages

# Fix: the original built this inline as f'\n{'-'*100}\n', reusing single
# quotes inside a single-quoted f-string — a SyntaxError on Python < 3.12
# (quote reuse requires PEP 701). Hoisting the separator is version-safe and
# produces byte-identical strings.
SECTION_SEPARATOR = '-' * 100

def add_transfer_instructions_to_parent_agents(agent, children, transfer_functions):
    """Append transfer-to-child instructions (one entry per child) to a parent agent."""
    # Fix: the comprehension previously used 'agent' as its loop variable,
    # shadowing the parameter; renamed to 'child' for clarity (same behavior).
    other_agent_name_descriptions_tools = f"\n{SECTION_SEPARATOR}\n".join(
        [f"Name: {child.name}\nDescription: {child.description if child.description else ''}\nTool for transfer: {transfer_functions[child.name].__name__}" for child in children.values()]
    )

    prompt = TRANSFER_CHILDREN_INSTRUCTIONS.format(other_agent_name_descriptions_tools=other_agent_name_descriptions_tools)
    agent.instructions = agent.instructions + f"\n\n{SECTION_SEPARATOR}\n\n" + prompt

    return agent

def add_transfer_instructions_to_child_agents(child, children_aware_of_parent):
    """Append give-up-control / transfer-to-parent instructions to a child agent."""
    if children_aware_of_parent:
        candidate_parents_name_description_tools = f"\n{SECTION_SEPARATOR}\n".join(
            [f"Name: {parent_name}\nTool for transfer: {func.__name__}" for parent_name, func in child.candidate_parent_functions.items()]
        )
        prompt = TRANSFER_PARENT_AWARE_INSTRUCTIONS.format(candidate_parents_name_description_tools=candidate_parents_name_description_tools)
    else:
        # NOTE(review): set() deduplicates but does not preserve order, so the
        # listing order here is non-deterministic — preserved from the original.
        candidate_parents_name_description_tools = f"\n{SECTION_SEPARATOR}\n".join(
            list(set([f"Tool for transfer: {func.__name__}" for _, func in child.candidate_parent_functions.items()]))
        )
        prompt = TRANSFER_GIVE_UP_CONTROL_INSTRUCTIONS.format(candidate_parents_name_description_tools=candidate_parents_name_description_tools)

    child.instructions = child.instructions + f"\n\n{SECTION_SEPARATOR}\n\n" + prompt
    return child

def add_rag_instructions_to_agent(agent_config, rag_tool_name):
    """Append RAG usage instructions (for the named rag tool) to an agent config dict."""
    prompt = RAG_INSTRUCTIONS.format(rag_tool_name=rag_tool_name)
    agent_config["instructions"] = agent_config["instructions"] + f"\n\n{SECTION_SEPARATOR}\n\n" + prompt
    return agent_config

def add_error_escalation_instructions(agent):
    """Append error-escalation instructions to an agent."""
    prompt = ERROR_ESCALATION_AGENT_INSTRUCTIONS
    agent.instructions = agent.instructions + f"\n\n{SECTION_SEPARATOR}\n\n" + prompt
    return agent

def get_universal_system_message(messages):
    """Wrap the leading system message (if any) in the SYSTEM_MESSAGE template; '' otherwise."""
    if messages and messages[0].get("role") == "system":
        return SYSTEM_MESSAGE.format(system_message=messages[0].get("content"))
    return ""

def add_universal_system_message_to_agent(agent, universal_sys_msg):
    """Append the universal system message to an agent's instructions."""
    agent.instructions = agent.instructions + f"\n\n{SECTION_SEPARATOR}\n\n" + universal_sys_msg
    return agent

def add_child_transfer_related_instructions(agent):
    """Append child-transfer-related instructions to an agent."""
    prompt = CHILD_TRANSFER_RELATED_INSTRUCTIONS
    agent.instructions = agent.instructions + f"\n\n{SECTION_SEPARATOR}\n\n" + prompt
    return agent
'internal' + } + print(f"Condition for tool call matched in raw_response_event. Appending tool call message: {tool_call_msg}") + messages.append(tool_call_msg) + + tool_call_output_dummy_msg = { + 'content': 'Web search completed.', + 'role': 'tool', + 'sender': None, + 'tool_calls': None, + 'tool_call_id': call_id, + 'tool_name': 'web_search', + 'response_type': 'internal' + } + messages.append(tool_call_output_dummy_msg) + + # Handle run item web search events + elif event.type == "run_item_stream_event": + if event.item.type == "tool_call_item": + try: + # Check if it's a web search call + if hasattr(event.item.raw_item, 'type') and event.item.raw_item.type == 'web_search_call': + call_id = event.item.raw_item.id if hasattr(event.item.raw_item, 'id') else str(uuid.uuid4()) + tool_call_msg = { + 'content': None, + 'role': 'assistant', + 'sender': current_agent.name if current_agent else None, + 'tool_calls': [{ + 'function': { + 'name': 'web_search', + 'arguments': json.dumps({ + 'search_id': call_id + }) + }, + 'id': call_id, + 'type': 'function' + }], + 'tool_call_id': None, + 'tool_name': None, + 'response_type': 'internal' + } + print(f"Condition for tool call matched in run_item_stream_event. 
Appending tool call message: {tool_call_msg}") + messages.append(tool_call_msg) + + tool_call_output_dummy_msg = { + 'content': 'Web search completed.', + 'role': 'tool', + 'sender': None, + 'tool_calls': None, + 'tool_call_id': call_id, + 'tool_name': 'web_search', + 'response_type': 'internal' + } + messages.append(tool_call_output_dummy_msg) + else: + # Handle regular tool calls + tool_call_msg = { + 'content': None, + 'role': 'assistant', + 'sender': current_agent.name if current_agent else None, + 'tool_calls': [{ + 'function': { + 'name': event.item.raw_item.name, + 'arguments': event.item.raw_item.arguments + }, + 'id': event.item.raw_item.call_id, + 'type': 'function' + }], + 'tool_call_id': None, + 'tool_name': None, + 'response_type': 'internal' + } + print(f"Condition for tool call matched in run_item_stream_event. Appending tool call message: {tool_call_msg}") + messages.append(tool_call_msg) + + tool_call_output_dummy_msg = { + 'content': 'Web search completed.', + 'role': 'tool', + 'sender': None, + 'tool_calls': None, + 'tool_call_id': call_id, + 'tool_name': 'web_search', + 'response_type': 'internal' + } + messages.append(tool_call_output_dummy_msg) + except Exception as e: + print("\n=== Error in tool_call_item handling ===") + print(f"Error: {str(e)}") + print(f"Event type: {event.type}") + print(f"Event item type: {event.item.type}") + print("Event details:") + print(f"Raw item: {event.item.raw_item}") + if hasattr(event.item.raw_item, '__dict__'): + print(f"Raw item attributes: {event.item.raw_item.__dict__}") + print(f"Traceback: {traceback.format_exc()}") + print("=" * 50) + raise + + elif event.item.type == "tool_call_output_item": + if isinstance(event.item.raw_item, dict) and event.item.raw_item.get('type') == 'web_search_results': + call_id = event.item.raw_item.get('search_id', event.item.raw_item.get('id', str(uuid.uuid4()))) + tool_call_output_msg = { + 'content': str(event.item.output), + 'role': 'tool', + 'sender': None, + 
'tool_calls': None, + 'tool_call_id': call_id, + 'tool_name': 'web_search', + 'response_type': 'internal' + } + print(f"Condition for tool call output matched in run_item_stream_event. Appending tool call output message: {tool_call_output_msg}") + messages.append(tool_call_output_msg) + + elif event.item.type == "web_search_call_item" or ( + hasattr(event.item, 'raw_item') and + hasattr(event.item.raw_item, 'type') and + event.item.raw_item.type == 'web_search_call' + ): + call_id = None + if hasattr(event.item.raw_item, 'id'): + call_id = event.item.raw_item.id + tool_call_msg = { + 'content': None, + 'role': 'assistant', + 'sender': current_agent.name if current_agent else None, + 'tool_calls': [{ + 'function': { + 'name': 'web_search', + 'arguments': json.dumps({ + 'search_id': call_id + }) + }, + 'id': call_id or str(uuid.uuid4()), + 'type': 'function' + }], + 'tool_call_id': None, + 'tool_name': None, + 'response_type': 'internal' + } + print(f"Condition for tool call matched in run_item_stream_event. 
Appending tool call message: {tool_call_msg}") + messages.append(tool_call_msg) + tool_call_output_dummy_msg = { + 'content': 'Web search completed.', + 'role': 'tool', + 'sender': None, + 'tool_calls': None, + 'tool_call_id': call_id, + 'tool_name': 'web_search', + 'response_type': 'internal' + } + messages.append(tool_call_output_dummy_msg) + + elif event.item.type == "web_search_results_item" or ( + hasattr(event.item, 'raw_item') and ( + (hasattr(event.item.raw_item, 'type') and event.item.raw_item.type == 'web_search_results') or + (isinstance(event.item.raw_item, dict) and event.item.raw_item.get('type') == 'web_search_results') + ) + ): + raw_item = event.item.raw_item + call_id = None + + if hasattr(raw_item, 'search_id'): + call_id = raw_item.search_id + elif isinstance(raw_item, dict) and 'search_id' in raw_item: + call_id = raw_item['search_id'] + elif hasattr(raw_item, 'id'): + call_id = raw_item.id + elif isinstance(raw_item, dict) and 'id' in raw_item: + call_id = raw_item['id'] + else: + call_id = str(uuid.uuid4()) + + results = {} + if hasattr(event.item, 'output'): + results = event.item.output + elif hasattr(raw_item, 'results'): + results = raw_item.results + elif isinstance(raw_item, dict) and 'results' in raw_item: + results = raw_item['results'] + + results_str = "" + try: + results_str = json.dumps(results) if results else "" + except Exception as e: + print(f"Error serializing results: {str(e)}") + results_str = str(results) + + tool_call_output_msg = { + 'content': results_str, + 'role': 'tool', + 'sender': None, + 'tool_calls': None, + 'tool_call_id': call_id, + 'tool_name': 'web_search', + 'response_type': 'internal' + } + print(f"Condition for tool call output matched in run_item_stream_event. 
Appending tool call output message: {tool_call_output_msg}") + messages.append(tool_call_output_msg) + + except Exception as e: + print("\n=== Error in handle_web_search_event ===") + print(f"Error: {str(e)}") + print(f"Event type: {event.type}") + if hasattr(event, 'item'): + print(f"Event item type: {event.item.type}") + print("Event item details:") + print(f"Raw item: {event.item.raw_item}") + if hasattr(event.item.raw_item, '__dict__'): + print(f"Raw item attributes: {event.item.raw_item.__dict__}") + print(f"Traceback: {traceback.format_exc()}") + print("=" * 50) + raise + + if messages: + print("-"*100) + print(f"Web search related messages: {messages}") + print("-"*100) + + return messages diff --git a/rowboat_agents/src/graph/helpers/state.py b/rowboat_agents/src/graph/helpers/state.py new file mode 100644 index 000000000..c1b2427c6 --- /dev/null +++ b/rowboat_agents/src/graph/helpers/state.py @@ -0,0 +1,60 @@ +from src.utils.common import common_logger +logger = common_logger +from .access import get_agent_data_by_name + +def reset_current_turn(messages): + # Set all messages' current_turn to False + for msg in messages: + msg["current_turn"] = False + + # Find most recent user message + messages[-1]["current_turn"] = True + + return messages + +def reset_current_turn_agent_history(agent_data, agent_names): + for name in agent_names: + data = get_agent_data_by_name(name, agent_data) + if data: + for msg in data["history"]: + msg["current_turn"] = False + return agent_data + +def add_recent_messages_to_history(recent_messages, last_agent_name, agent_data, messages, parent_has_child_history): + last_msg = messages[-1] + specific_agent_data = get_agent_data_by_name(last_agent_name, agent_data) + if specific_agent_data: + specific_agent_data["history"].extend(recent_messages) + if parent_has_child_history: + current_agent_data = specific_agent_data + while current_agent_data.get("most_recent_parent_name"): + parent_name = 
current_agent_data.get("most_recent_parent_name") + parent_agent_data = get_agent_data_by_name(parent_name, agent_data) + if parent_agent_data: + parent_agent_data["history"].extend(recent_messages) + current_agent_data = parent_agent_data + else: + logger.error(f"Parent agent data for {current_agent_data['name']} not found in agent_data") + raise ValueError(f"Parent agent data for {current_agent_data['name']} not found in agent_data") + else: + agent_data.append({ + "name": last_agent_name, + "history": [last_msg] + }) + + return agent_data + +def construct_state_from_response(response, agents): + agent_data = [] + for agent in agents: + agent_data.append({ + "name": agent.name, + "instructions": agent.instructions + }) + + state = { + "last_agent_name": response.agent.name, + "agent_data": agent_data + } + + return state \ No newline at end of file diff --git a/rowboat_agents/src/graph/helpers/transfer.py b/rowboat_agents/src/graph/helpers/transfer.py new file mode 100644 index 000000000..0c7436d3e --- /dev/null +++ b/rowboat_agents/src/graph/helpers/transfer.py @@ -0,0 +1,44 @@ +from src.utils.common import common_logger +logger = common_logger + +def create_transfer_function_to_agent(agent): + agent_name = agent.name + + fn_spec = { + "name": f"transfer_to_{agent_name.lower().replace(' ', '_')}", + "description": f"Function to transfer the chat to {agent_name}.", + "return_value": agent + } + + def generated_function(*args, **kwargs): + logger.info(f"Transferring chat to {agent_name}") + return fn_spec.get('return_value', None) + + generated_function.__name__ = fn_spec['name'] + generated_function.__doc__ = fn_spec.get('description', '') + + return generated_function + +def create_transfer_function_to_parent_agent(parent_agent, children_aware_of_parent, transfer_functions): + if children_aware_of_parent: + name = f"{transfer_functions[parent_agent.name].__name__}_from_child" + description = f"Function to transfer the chat to your parent agent: 
{parent_agent.name}." + else: + name = "give_up_chat_control" + description = "Function to give up control of the chat when you are unable to handle it." + + + fn_spec = { + "name": name, + "description": description, + "return_value": parent_agent + } + + def generated_function(*args, **kwargs): + logger.info(f"Transferring chat to parent agent: {parent_agent.name}") + return fn_spec.get('return_value', None) + + generated_function.__name__ = fn_spec['name'] + generated_function.__doc__ = fn_spec.get('description', '') + + return generated_function \ No newline at end of file diff --git a/rowboat_agents/src/graph/instructions.py b/rowboat_agents/src/graph/instructions.py new file mode 100644 index 000000000..493550b5a --- /dev/null +++ b/rowboat_agents/src/graph/instructions.py @@ -0,0 +1,105 @@ +######################## +# Instructions for agents that use RAG +######################## +RAG_INSTRUCTIONS = f""" +# Instructions about using the article retrieval tool +- Where relevant, use the articles tool: {{rag_tool_name}} to fetch articles with knowledge relevant to the query and use its contents to respond to the user. +- Do not send a separate message first asking the user to wait while you look up information. Immediately fetch the articles and respond to the user with the answer to their query. +- Do not make up information. If the article's contents do not have the answer, give up control of the chat (or transfer to your parent agent, as per your transfer instructions). Do not say anything to the user. +""" + +######################## +# Instructions for child agents that are aware of parent agents +######################## +TRANSFER_PARENT_AWARE_INSTRUCTIONS = f""" +# Instructions about using your parent agents +You have the following candidate parent agents that you can transfer the chat to, using the appropriate tool calls for the transfer: +{{candidate_parents_name_description_tools}}. 
+ +## Notes: +- During runtime, you will be provided with a tool call for exactly one of these parent agents that you can use. Use that tool call to transfer the chat to the parent agent in case you are unable to handle the chat (e.g. if it is not in your scope of instructions). +- Transfer the chat to the appropriate agent, based on the chat history and / or the user's request. +- When you transfer the chat to another agent, you should not provide any response to the user. For example, do not say 'Transferring chat to X agent' or anything like that. Just invoke the tool call to transfer to the other agent. +- Do NOT ever mention the existence of other agents. For example, do not say 'Please check with X agent for details regarding processing times.' or anything like that. +- If any other agent transfers the chat to you without responding to the user, it means that they don't know how to help. Do not transfer the chat to back to the same agent in this case. In such cases, you should transfer to the escalation agent using the appropriate tool call. Never ask the user to contact support. +""" + +######################## +# Instructions for child agents that give up control to parent agents +######################## +TRANSFER_GIVE_UP_CONTROL_INSTRUCTIONS = f""" +# Instructions about giving up chat control +If you are unable to handle the chat (e.g. if it is not in your scope of instructions), you should use the tool call provided to give up control of the chat. +{{candidate_parents_name_description_tools}} + +## Notes: +- When you give up control of the chat, you should not provide any response to the user. Just invoke the tool call to give up control. 
+""" + +######################## +# Instructions for parent agents that need to transfer the chat to other specialized (children) agents +######################## +TRANSFER_CHILDREN_INSTRUCTIONS = f""" +# Instructions about using other specialized agents +You have the following specialized agents that you can transfer the chat to, using the appropriate tool calls for the transfer: +{{other_agent_name_descriptions_tools}} + +## Notes: +- Transfer the chat to the appropriate agent, based on the chat history and / or the user's request. +- When you transfer the chat to another agent, you should not provide any response to the user. For example, do not say 'Transferring chat to X agent' or anything like that. Just invoke the tool call to transfer to the other agent. +- Do NOT ever mention the existence of other agents. For example, do not say 'Please check with X agent for details regarding processing times.' or anything like that. +- If any other agent transfers the chat to you without responding to the user, it means that they don't know how to help. Do not transfer the chat to back to the same agent in this case. In such cases, you should transfer to the escalation agent using the appropriate tool call. Never ask the user to contact support. +""" + + +######################## +# Additional instruction for escalation agent when called due to an error +######################## +ERROR_ESCALATION_AGENT_INSTRUCTIONS = f""" +# Context +The rest of the parts of the chatbot were unable to handle the chat. Hence, the chat has been escalated to you. In addition to your other instructions, tell the user that you are having trouble handling the chat - say "I'm having trouble helping with your request. Sorry about that.". Remember you are a part of the chatbot as well. 
+""" + + +######################## +# Universal system message formatting +######################## +SYSTEM_MESSAGE = f""" +# Additional System-Wide Context or Instructions: +{{system_message}} +""" + +######################## +# Instructions for non-repeat child transfer +######################## +CHILD_TRANSFER_RELATED_INSTRUCTIONS = f""" +# Critical Rules for Agent Transfers and Handoffs + +- SEQUENTIAL TRANSFERS AND RESPONSES: + 1. BEFORE transferring to any agent: + - Plan your complete sequence of needed transfers + - Document which responses you need to collect + + 2. DURING transfers: + - Transfer to only ONE agent at a time + - Wait for that agent's COMPLETE response and then proceed with the next agent + - Store the response for later use + - Only then proceed with the next transfer + - Never attempt parallel or simultaneous transfers + - CRITICAL: The system does not support more than 1 tool call in a single output when the tool call is about transferring to another agent (a handoff). You must only put out 1 transfer related tool call in one output. + + 3. AFTER receiving a response: + - Do not transfer to another agent until you've processed the current response + - If you need to transfer to another agent, wait for your current processing to complete + - Never transfer back to an agent that has already responded + +- COMPLETION REQUIREMENTS: + - Never provide final response until ALL required agents have been consulted + - Never attempt to get multiple responses in parallel + - If a transfer is rejected due to multiple handoffs: + 1. Complete current response processing + 2. Then retry the transfer as next in sequence + 3. Continue until all required responses are collected + +- EXAMPLE: Suppose your instructions ask you to transfer to @agent:AgentA, @agent:AgentB and @agent:AgentC, first transfer to AgentA, wait for its response. Then transfer to AgentB, wait for its response. Then transfer to AgentC, wait for its response. 
Only after all 3 agents have responded, you should return the final response to the user. +""" \ No newline at end of file diff --git a/rowboat_agents/src/graph/tool_calling.py b/rowboat_agents/src/graph/tool_calling.py new file mode 100644 index 000000000..6457d6c91 --- /dev/null +++ b/rowboat_agents/src/graph/tool_calling.py @@ -0,0 +1,148 @@ +from bson.objectid import ObjectId +from openai import OpenAI +import os +from motor.motor_asyncio import AsyncIOMotorClient +import asyncio +from dataclasses import dataclass +from typing import Dict, List, Any +from qdrant_client import QdrantClient +import json +# Initialize MongoDB client +mongo_uri = os.environ.get("MONGODB_URI", "mongodb://localhost:27017") +mongo_client = AsyncIOMotorClient(mongo_uri) +db = mongo_client.rowboat +data_sources_collection = db['sources'] +data_source_docs_collection = db['source_docs'] + + +qdrant_client = QdrantClient( + url=os.environ.get("QDRANT_URL"), + api_key=os.environ.get("QDRANT_API_KEY") or None +) +# Initialize OpenAI client +client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY")) + +# Define embedding model +embedding_model = "text-embedding-3-small" + +async def embed(model: str, value: str) -> dict: + """ + Generate embeddings using OpenAI's embedding models. + + Args: + model (str): The embedding model to use (e.g., "text-embedding-3-small"). + value (str): The text to embed. + + Returns: + dict: A dictionary containing the embedding. + """ + response = client.embeddings.create( + model=model, + input=value + ) + return {"embedding": response.data[0].embedding} + +async def call_rag_tool( + project_id: str, + query: str, + source_ids: list[str], + return_type: str, + k: int, +) -> dict: + """ + Runs the RAG tool call to retrieve information based on the query and source IDs. + + Args: + project_id (str): The ID of the project. + query (str): The query string to search for. + source_ids (list[str]): List of source IDs to filter the search. 
+ return_type (str): The type of return, e.g., 'chunks' or other. + k (int): The number of results to return. + + Returns: + dict: A dictionary containing the results of the search. + """ + + print("\n\n calling rag tool \n\n") + print(query) + # Create embedding for the query + embed_result = await embed(model=embedding_model, value=query) + + # print(embed_result) + # Fetch all active data sources for this project + sources = await data_sources_collection.find({ + "projectId": project_id, + "active": True + }).to_list(length=None) + + print(f"Sources: {sources}") + # Filter sources to those in source_ids + valid_source_ids = [ + str(s["_id"]) for s in sources if str(s["_id"]) in source_ids + ] + + print(f"Valid source ids: {valid_source_ids}") + # If no valid sources are found, return empty results + if not valid_source_ids: + return '' + + # Perform Qdrant vector search + print(f"Calling Qdrant search with limit {k}") + qdrant_results = qdrant_client.search( + collection_name="embeddings", + query_vector=embed_result["embedding"], + query_filter={ + "must": [ + {"key": "projectId", "match": {"value": project_id}}, + {"key": "sourceId", "match": {"any": valid_source_ids}}, + ] + }, + limit=k, + with_payload=True + ) + + # Map the Qdrant results to the desired format + results = [ + { + "title": point.payload["title"], + "name": point.payload["name"], + "content": point.payload["content"], + "docId": point.payload["docId"], + "sourceId": point.payload["sourceId"], + } + for point in qdrant_results + ] + + print(f"Return type: {return_type}") + print(f"Results: {results}") + # If return_type is 'chunks', return the results directly + if return_type == "chunks": + chunks = json.dumps({"Information": results}, indent=2) + print(f"Returning chunks: {chunks}") + return chunks + + # Otherwise, fetch the full document contents from MongoDB + doc_ids = [ObjectId(r["docId"]) for r in results] + docs = await data_source_docs_collection.find({"_id": {"$in": 
doc_ids}}).to_list(length=None) + + # Create a dictionary for quick lookup of documents by their string ID + doc_dict = {str(doc["_id"]): doc for doc in docs} + + # Update the results with the full document content + results = [ + {**r, "content": doc_dict.get(r["docId"], {}).get("content", "")} + for r in results + ] + + # Convert results to a JSON string + docs = json.dumps({"Information": results}, indent=2) + print(f"Returning docs: {docs}") + return docs + +if __name__ == "__main__": + asyncio.run(call_rag_tool( + project_id="faf2bfb3-41d4-4299-b0d2-048581ea9bd8", + query="What is the range on your scooter", + source_ids=["67e102c9fab4514d7aaeb5a4"], + return_type="docs", + k=3)) \ No newline at end of file diff --git a/rowboat_agents/src/graph/tools.py b/rowboat_agents/src/graph/tools.py new file mode 100644 index 000000000..425282c0d --- /dev/null +++ b/rowboat_agents/src/graph/tools.py @@ -0,0 +1,46 @@ +import json +import random + +def tool_raise_error(error_message): + print(f"Raising error: {error_message}") + raise ValueError(f"Raising error: {error_message}") + +def respond_to_tool_raise_error(tool_calls, mock=False): + error_message = json.loads(tool_calls[0]["function"]["arguments"]).get("error_message", "") + return _create_tool_response(tool_calls, tool_raise_error(error_message)) + +def tool_close_chat(error_message): + print(f"Closing chat: {error_message}") + raise ValueError(f"Closing chat: {error_message}") + +def respond_to_tool_close_chat(tool_calls, mock=False): + error_message = json.loads(tool_calls[0]["function"]["arguments"]).get("error_message", "") + return _create_tool_response(tool_calls, tool_close_chat(error_message)) + +def _create_tool_response(tool_calls, content, mock=False): + """ + Creates a standardized tool response format. 
+ """ + return { + "role": "tool", + "content": content, + "tool_call_id": tool_calls[0]["id"], + "name": tool_calls[0]["function"]["name"] + } + +def create_error_tool_call(error_message): + error_message_tool_call = { + "role": "assistant", + "sender": "system", + "tool_calls": [ + { + "function": { + "name": "raise_error", + "arguments": "{\"error_message\":\"" + error_message + "\"}" + }, + "id": "call_" + ''.join(random.choices('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789', k=24)), + "type": "function" + } + ] + } + return error_message_tool_call \ No newline at end of file diff --git a/rowboat_agents/src/graph/tracing.py b/rowboat_agents/src/graph/tracing.py new file mode 100644 index 000000000..60f12a76c --- /dev/null +++ b/rowboat_agents/src/graph/tracing.py @@ -0,0 +1,212 @@ +from agents import TracingProcessor +import logging +from datetime import datetime, timedelta +import json + +logger = logging.getLogger(__name__) + +class AgentTurnTraceProcessor(TracingProcessor): + """Custom trace processor to print detailed information about agent turns.""" + + def __init__(self): + self.span_depth = {} # Track depth of each span + self.handoff_chain = [] # Track sequence of agent handoffs + self.message_flow = [] # Track message flow between agents + + def _get_indent_level(self, span): + """Calculate indent level based on parent_id chain.""" + depth = 0 + current_id = span.parent_id + while current_id: + depth += 1 + current_id = self.span_depth.get(current_id) + return depth + + def _format_time(self, timestamp_str): + """Convert ISO timestamp string to formatted time string in IST timezone.""" + try: + dt = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00')) + # Add 5 hours and 30 minutes for IST timezone + dt = dt + timedelta(hours=5, minutes=30) + return dt.strftime("%H:%M:%S.%f")[:-3] + except (ValueError, AttributeError): + return "00:00:00.000" + + def _calculate_duration(self, start_str, end_str): + """Calculate duration 
between two ISO timestamp strings in seconds.""" + try: + start = datetime.fromisoformat(start_str.replace('Z', '+00:00')) + end = datetime.fromisoformat(end_str.replace('Z', '+00:00')) + return (end - start).total_seconds() + except (ValueError, AttributeError): + return 0.0 + + def _get_span_id(self, span): + """Safely get span identifier.""" + for attr in ['span_id', 'id', 'trace_id']: + if hasattr(span, attr): + return getattr(span, attr) + return None + + def _print_handoff_chain(self, indent=""): + """Print the current handoff chain.""" + if self.handoff_chain: + print(f"{indent}Current Handoff Chain:") + print(f"{indent} {' -> '.join(self.handoff_chain)}") + + def _print_message_flow(self, indent=""): + """Print the message flow history.""" + if self.message_flow: + print(f"{indent}Message Flow History:") + for msg in self.message_flow: + print(f"{indent} {msg}") + + def on_trace_start(self, trace): + """Called when a trace starts.""" + separator = "="*100 + print("\n" + separator) + print("šŸš€ TRACE START") + print(f"Name: {trace.name}") + print(f"ID: {trace.trace_id}") + if trace.metadata: + print("\nMetadata:") + for key, value in trace.metadata.items(): + print(f" {key}: {value}") + print(separator + "\n") + + # Reset tracking for new trace + self.handoff_chain = [] + self.message_flow = [] + + def on_trace_end(self, trace): + """Called when a trace ends.""" + separator = "="*100 + print("\n" + separator) + print("āœ… TRACE END") + print(f"Name: {trace.name}") + print(f"ID: {trace.trace_id}") + + # Print final chain state + print("\nFinal State:") + self._print_handoff_chain(" ") + self._print_message_flow(" ") + + print(separator + "\n") + + # Clear tracking + self.span_depth.clear() + self.handoff_chain = [] + self.message_flow = [] + + def on_span_start(self, span): + """Called when a span starts.""" + try: + indent = " " * self._get_indent_level(span) + start_time = self._format_time(span.started_at) + span_id = self._get_span_id(span) + + # Track 
span depth + if span.parent_id and span_id: + self.span_depth[span_id] = span.parent_id + + # Print span header with clear section separator + print(f"\n{indent}{'>'*40}") + print(f"{indent}ā–¶ [{start_time}] {span.span_data.type.upper()} SPAN START") + print(f"{indent} ID: {span_id}") + print(f"{indent} Parent ID: {span.parent_id}") + + data = span.span_data.export() + + # Print span-specific information + if span.span_data.type == "agent": + agent_name = data.get('name', 'Unknown') + print(f"{indent} Agent: {agent_name}") + print(f"{indent} Handoffs: {', '.join(data.get('handoffs', []))}") + + # Track agent in handoff chain + if agent_name not in self.handoff_chain: + self.handoff_chain.append(agent_name) + self._print_handoff_chain(indent + " ") + + elif span.span_data.type == "generation": + print(f"{indent} Model: {data.get('model', 'Unknown')}") + messages = data.get('messages', []) + if messages: + print(f"{indent} Messages: {len(messages)} message(s)") + + elif span.span_data.type == "function": + print(f"{indent} Function: {data.get('name', 'Unknown')}") + args = data.get('arguments') + if args: + print(f"{indent} Arguments: {args}") + + elif span.span_data.type == "handoff": + from_agent = data.get('from_agent', 'Unknown') + to_agent = data.get('to_agent', 'Unknown') + print(f"{indent} From: {from_agent}") + print(f"{indent} To: {to_agent}") + + # Track handoff in message flow + flow_msg = f"{from_agent} -> {to_agent}" + self.message_flow.append(flow_msg) + print(f"{indent} Message Flow:") + for msg in self.message_flow[-3:]: # Show last 3 flows + print(f"{indent} {msg}") + + print(f"{indent}{'>'*40}") + + except Exception as e: + print(f"\nāŒ Error in on_span_start: {str(e)}") + import traceback + print(traceback.format_exc()) + + def on_span_end(self, span): + """Called when a span ends.""" + try: + indent = " " * self._get_indent_level(span) + end_time = self._format_time(span.ended_at) + duration = self._calculate_duration(span.started_at, 
span.ended_at) + + # Print span end information with clear section separator + print(f"\n{indent}{'<'*40}") + print(f"{indent}ā—€ [{end_time}] {span.span_data.type.upper()} SPAN END") + print(f"{indent} Duration: {duration:.3f}s") + + data = span.span_data.export() + + # Print span-specific output + if span.span_data.type == "generation": + output = data.get('output') + if output: + print(f"{indent} Output: {str(output)[:200]}...") + + elif span.span_data.type == "function": + output = data.get('output') + if output: + print(f"{indent} Output: {str(output)[:200]}...") + + elif span.span_data.type == "handoff": + self._print_handoff_chain(indent + " ") + self._print_message_flow(indent + " ") + + print(f"{indent}{'<'*40}") + + # Clean up span depth tracking + span_id = self._get_span_id(span) + if span_id and span_id in self.span_depth: + del self.span_depth[span_id] + + except Exception as e: + print(f"\nāŒ Error in on_span_end: {str(e)}") + import traceback + print(traceback.format_exc()) + + def shutdown(self): + """Called when the processor is shutting down.""" + self.span_depth.clear() + self.handoff_chain = [] + self.message_flow = [] + + def force_flush(self): + """Called to force flush any buffered traces/spans.""" + pass \ No newline at end of file diff --git a/rowboat_agents/src/graph/types.py b/rowboat_agents/src/graph/types.py new file mode 100644 index 000000000..4bead148b --- /dev/null +++ b/rowboat_agents/src/graph/types.py @@ -0,0 +1,26 @@ +from enum import Enum +class AgentRole(Enum): + ESCALATION = "escalation" + POST_PROCESSING = "post_process" + GUARDRAILS = "guardrails" + +class outputVisibility(Enum): + EXTERNAL = "user_facing" + INTERNAL = "internal" + +class ResponseType(Enum): + INTERNAL = "internal" + EXTERNAL = "external" + +class ControlType(Enum): + RETAIN = "retain" + PARENT_AGENT = "relinquish_to_parent" + START_AGENT = "start_agent" + +class PromptType(Enum): + STYLE = "style_prompt" + GREETING = "greeting" + +class ErrorType(Enum): 
+ FATAL = "fatal" + ESCALATE = "escalate" \ No newline at end of file diff --git a/rowboat_agents/src/utils/__init__.py b/rowboat_agents/src/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/rowboat_agents/src/utils/client.py b/rowboat_agents/src/utils/client.py new file mode 100644 index 000000000..44af3e979 --- /dev/null +++ b/rowboat_agents/src/utils/client.py @@ -0,0 +1,38 @@ +import os +import logging +from openai import AsyncOpenAI, OpenAI +import dotenv +dotenv.load_dotenv() + +PROVIDER_BASE_URL = os.getenv('PROVIDER_BASE_URL', '') +PROVIDER_API_KEY = os.getenv('PROVIDER_API_KEY') +PROVIDER_DEFAULT_MODEL = os.getenv('PROVIDER_DEFAULT_MODEL') + +client = None +if not PROVIDER_API_KEY: + PROVIDER_API_KEY = os.getenv('OPENAI_API_KEY') + +if not PROVIDER_API_KEY: + raise(ValueError("No LLM Provider API key found")) + +if not PROVIDER_DEFAULT_MODEL: + PROVIDER_DEFAULT_MODEL = 'gpt-4.1' + +if PROVIDER_BASE_URL: + print(f"Using provider {PROVIDER_BASE_URL}") + client = AsyncOpenAI(base_url=PROVIDER_BASE_URL, api_key=PROVIDER_API_KEY) +else: + print("No provider base URL configured, using OpenAI directly") + +completions_client = None +if PROVIDER_BASE_URL: + print(f"Using provider {PROVIDER_BASE_URL} for completions") + completions_client = OpenAI( + base_url=PROVIDER_BASE_URL, + api_key=PROVIDER_API_KEY + ) +else: + print(f"Using OpenAI directly for completions") + completions_client = OpenAI( + api_key=PROVIDER_API_KEY + ) \ No newline at end of file diff --git a/rowboat_agents/src/utils/common.py b/rowboat_agents/src/utils/common.py new file mode 100644 index 000000000..4c74634a2 --- /dev/null +++ b/rowboat_agents/src/utils/common.py @@ -0,0 +1,210 @@ +import json +import logging +import os +import subprocess +import sys +import time +from dotenv import load_dotenv +from openai import OpenAI + +from src.utils.client import completions_client +load_dotenv() + +def setup_logger(name, log_file='./run.log', level=logging.INFO, 
log_to_file=False): + """Function to set up a logger with a specific name and log file.""" + formatter = logging.Formatter('%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] %(message)s') + + # Changed to use stderr instead of stdout + handler = logging.StreamHandler(sys.stderr) + handler.setFormatter(formatter) + + # Create a logger and set its level + logger = logging.getLogger(name) + logger.setLevel(level) + + # Clear any existing handlers to avoid duplicates + if logger.hasHandlers(): + logger.handlers.clear() + + # Prevent propagation to parent loggers + logger.propagate = False + + logger.addHandler(handler) + + return logger + +common_logger = setup_logger('logger') +logger = common_logger + +def read_json_from_file(file_name): + logger.info(f"Reading json from {file_name}") + try: + with open(file_name, 'r') as file: + out = file.read() + out = json.loads(out) + return out + except Exception as e: + logger.error(e) + return None + +def get_api_key(key_name): + api_key = os.getenv(key_name) + # Check if the API key was loaded successfully + if not api_key: + raise ValueError(f"{key_name} not found. Did you set it in the .env file?") + return api_key + +def generate_model_output_from_multi_turn_conv(messages, output_type='json', model=None): + """Generate output from a multi-turn conversation using any supported model.""" + return generate_openai_output(messages, output_type, model) + +def generate_openai_output(messages, output_type='not_json', model=None, return_completion=False, tools=None): + """Generate output using any model through the configured provider (e.g. LiteLLM). 
+ All models are handled through the same OpenAI-compatible interface.""" + print(f"Using client: {completions_client} and model: {model}") + try: + # Prepare the API call parameters + params = { + "model": model, + "messages": messages, + } + + # Add tools if provided + if tools: + params["tools"] = tools + # For Modal models, we need to format the tools in a way they can understand + if model and model.startswith("modal/"): + # Convert OpenAI-style tools to Modal format + modal_tools = [] + for tool in tools: + if "function" in tool: + modal_tool = { + "name": tool["function"]["name"], + "description": tool["function"]["description"], + "parameters": tool["function"]["parameters"] + } + modal_tools.append(modal_tool) + params["tools"] = modal_tools + + # Add response format for JSON output + if output_type == 'json': + params["response_format"] = {"type": "json_object"} + + # Make the API call + chat_completion = completions_client.chat.completions.create(**params) + + if return_completion: + return chat_completion + return chat_completion.choices[0].message.content + + except Exception as e: + logger.error(e) + return None + +def generate_llm_output(messages, model, tools=None): + """Generate output using any model through the configured provider. + The actual model routing is handled by the provider (e.g. LiteLLM).""" + return generate_openai_output(messages, output_type='text', model=model, tools=tools) + +def generate_model_output_from_multi_turn_conv_multithreaded(messages, retries=5, delay=1, output_type='json', model=None): + """Generate output from a multi-turn conversation with retry logic for any supported model.""" + while retries > 0: + try: + output = generate_model_output_from_multi_turn_conv(messages, output_type=output_type, model=model) + return output + except Exception as e: + print(f'Error occurred. 
Retrying in {delay} seconds...') + time.sleep(delay) + delay *= 2 # Exponential backoff + retries -= 1 + + if retries == 0: + print(f'Failed to process after multiple retries.') + return [] + +def convert_message_content_json_to_strings(messages): + for msg in messages: + if 'content' in msg.keys() and isinstance(msg['content'], dict): + msg['content'] = json.dumps(msg['content']) + return messages + +def merge_defaultdicts(dict_parent, dict_child): + for key, value in dict_child.items(): + if key in dict_parent: + # If the key exists in both, handle merging based on type + if isinstance(dict_parent[key], list): + dict_parent[key].extend(value) + elif isinstance(dict_parent[key], dict): + dict_parent[key].update(value) + elif isinstance(dict_parent[key], set): + dict_parent[key].update(value) + else: + dict_parent[key] += value # For other types like int, float, etc. + else: + dict_parent[key] = value + + return dict_parent + +def read_jsonl_from_file(file_name): + # logger.info(f"Reading jsonl from {file_name}") + try: + with open(file_name, 'r') as file: + lines = file.readlines() + dataset = [json.loads(line.strip()) for line in lines] + return dataset + except Exception as e: + logger.error(e) + return None + +def write_jsonl_to_file(list_dicts, file_name): + try: + with open(file_name, 'w') as file: + for d in list_dicts: + file.write(json.dumps(d)+'\n') + return True + except Exception as e: + logger.error(e) + return False + +def read_text_from_file(file_name): + try: + with open(file_name, 'r') as file: + out = file.read() + return out + except Exception as e: + logger.error(e) + return None + +def write_json_to_file(data, file_name): + try: + with open(file_name, 'w') as file: + json.dump(data, file, indent=4) + return True + except Exception as e: + logger.error(e) + return False + + +def get_git_path(path): + # Run `git rev-parse --show-toplevel` to get the root of the Git repository + try: + git_root = subprocess.check_output(["git", "rev-parse", 
"--show-toplevel"], text=True).strip() + return f"{git_root}/{path}" + except subprocess.CalledProcessError: + raise RuntimeError("Not inside a Git repository") + +def update_tokens_used(provider, model, tokens_used, completion): + provider_model = f"{provider}/{model}" + input_tokens = completion.usage.prompt_tokens + output_tokens = completion.usage.completion_tokens + + if provider_model not in tokens_used: + tokens_used[provider_model] = { + 'input_tokens': 0, + 'output_tokens': 0, + } + + tokens_used[provider_model]['input_tokens'] += input_tokens + tokens_used[provider_model]['output_tokens'] += output_tokens + + return tokens_used \ No newline at end of file diff --git a/rowboat_agents/tests/__init__.py b/rowboat_agents/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/rowboat_agents/tests/app_client.py b/rowboat_agents/tests/app_client.py new file mode 100644 index 000000000..0669a97cf --- /dev/null +++ b/rowboat_agents/tests/app_client.py @@ -0,0 +1,22 @@ +from src.utils.common import common_logger, read_json_from_file +logger = common_logger +logger.info("Running swarm_flask_client.py") +import requests + +if __name__ == "__main__": + import argparse + parser = argparse.ArgumentParser() + parser.add_argument('--sample_request', type=str, required=True, help='Sample request JSON file name under tests/sample_requests/') + parser.add_argument('--api_key', type=str, required=True, help='API key to use for authentication') + parser.add_argument('--host', type=str, required=False, help='Host to use for the request', default='http://localhost:4040') + args = parser.parse_args() + + request = read_json_from_file(f"./tests/sample_requests/{args.sample_request}").get("lastRequest", {}) + print("Sending request...") + response = requests.post( + f"{args.host}/chat", + json=request, + headers={'Authorization': f'Bearer {args.api_key}'} + ).json() + print("Output: ") + print(response) \ No newline at end of file diff --git 
a/rowboat_agents/tests/app_client_streaming.py b/rowboat_agents/tests/app_client_streaming.py new file mode 100644 index 000000000..f045d161a --- /dev/null +++ b/rowboat_agents/tests/app_client_streaming.py @@ -0,0 +1,137 @@ +from src.utils.common import read_json_from_file +import requests +import json +import argparse +from datetime import datetime + +print("Running app_client_streaming.py") + +def preprocess_messages(messages): + # Preprocess messages to handle null content and role issues + for msg in messages: + # Handle null content in assistant messages with tool calls + if (msg.get("role") == "assistant" and + msg.get("content") is None and + msg.get("tool_calls") is not None and + len(msg.get("tool_calls")) > 0): + msg["content"] = "Calling tool" + + # Handle role issues + if msg.get("role") == "tool": + msg["role"] = "developer" + elif not msg.get("role"): + msg["role"] = "user" + + return messages + +def stream_chat(host, request_data, api_key): + start_time = datetime.now() + print("\n" + "="*80) + print(f"Starting streaming chat at {start_time}") + print(f"Host: {host}") + print("="*80 + "\n") + + try: + print("\n" + "-"*80) + print("Connecting to stream...") + stream_response = requests.post( + f"{host}/chat_stream", + json=request_data, + headers={ + 'Authorization': f'Bearer {api_key}', + 'Accept': 'text/event-stream' + }, + stream=True + ) + + if stream_response.status_code != 200: + print(f"Error connecting to stream. 
Status code: {stream_response.status_code}") + print(f"Response: {stream_response.text}") + return + + print(f"Successfully connected to stream") + print("-"*80 + "\n") + + event_count = 0 + + try: + print("\n" + "-"*80) + print("Starting to process events...") + print("-"*80 + "\n") + + for line in stream_response.iter_lines(decode_unicode=True): + if line: + if line.startswith('data: '): + data = line[6:] # Remove 'data: ' prefix + try: + event_data = json.loads(data) + event_count += 1 + print("\n" + "*"*80) + print(f"Event #{event_count} at {datetime.now().isoformat()}") + + if isinstance(event_data, dict): + # Pretty print the event data + print("Event Data:") + print(json.dumps(event_data, indent=2)) + + # Special handling for message events + if 'content' in event_data: + print("\nMessage Content:", event_data['content']) + if event_data.get('tool_calls'): + print("Tool Calls:", json.dumps(event_data['tool_calls'], indent=2)) + else: + print("Event Data:", event_data) + print("*"*80 + "\n") + + except json.JSONDecodeError as e: + print(f"Error decoding event data: {e}") + print(f"Raw data: {data}") + + except Exception as e: + print(f"Error processing stream: {e}") + import traceback + traceback.print_exc() + finally: + print("\n" + "-"*80) + print(f"Closing stream after processing {event_count} events") + print("-"*80 + "\n") + stream_response.close() + + except requests.exceptions.RequestException as e: + print(f"Request error during streaming: {e}") + import traceback + traceback.print_exc() + + end_time = datetime.now() + duration = end_time - start_time + print("\n" + "="*80) + print(f"Streaming session completed at {end_time}") + print(f"Total duration: {duration}") + print("="*80 + "\n") + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument('--sample_request', type=str, required=False, default='tmp.json', + help='Sample request JSON file name under tests/sample_requests/') + parser.add_argument('--api_key', type=str, 
required=False, default='test', + help='API key to use for authentication') + parser.add_argument('--host', type=str, default='http://localhost:4040', + help='Host to use for the request') + args = parser.parse_args() + + try: + print("\n" + "="*80) + print("Loading request data...") + request = read_json_from_file(f"./tests/sample_requests/{args.sample_request}").get("lastRequest", {}) + print("Request data:") + print(json.dumps(request, indent=2)) + print("Starting streaming request...") + print("="*80 + "\n") + + stream_chat(args.host, request, args.api_key) + except Exception as e: + print("\n" + "!"*80) + print(f"Error in main: {e}") + import traceback + traceback.print_exc() + print("!"*80 + "\n") diff --git a/rowboat_agents/tests/interactive.py b/rowboat_agents/tests/interactive.py new file mode 100644 index 000000000..f1c72fcea --- /dev/null +++ b/rowboat_agents/tests/interactive.py @@ -0,0 +1,207 @@ +import copy +from datetime import datetime +import json +import sys +import asyncio +import requests +import argparse + +from src.utils.common import common_logger, read_json_from_file +logger = common_logger + +def preprocess_messages(messages): + # Preprocess messages to handle null content and role issues + for msg in messages: + # Handle null content in assistant messages with tool calls + if (msg.get("role") == "assistant" and + msg.get("content") is None and + msg.get("tool_calls") is not None and + len(msg.get("tool_calls")) > 0): + msg["content"] = "Calling tool" + + # Handle role issues + if msg.get("role") == "tool": + msg["role"] = "developer" + elif not msg.get("role"): + msg["role"] = "user" + + return messages + +def stream_chat(host, request_data, api_key): + start_time = datetime.now() + print("\n" + "="*80) + print(f"Starting streaming chat at {start_time}") + print(f"Host: {host}") + print("="*80 + "\n") + + try: + print("\n" + "-"*80) + print("Connecting to stream...") + stream_response = requests.post( + f"{host}/chat_stream", + 
json=request_data, + headers={ + 'Authorization': f'Bearer {api_key}', + 'Accept': 'text/event-stream' + }, + stream=True + ) + + if stream_response.status_code != 200: + print(f"Error connecting to stream. Status code: {stream_response.status_code}") + print(f"Response: {stream_response.text}") + return None, None + + print(f"Successfully connected to stream") + print("-"*80 + "\n") + + event_count = 0 + collected_messages = [] + final_state = None + + try: + print("\n" + "-"*80) + print("Starting to process events...") + print("-"*80 + "\n") + + for line in stream_response.iter_lines(decode_unicode=True): + if line: + if line.startswith('data: '): + data = line[6:] # Remove 'data: ' prefix + try: + event_data = json.loads(data) + event_count += 1 + print("\n" + "*"*80) + print(f"Event #{event_count} at {datetime.now().isoformat()}") + + if isinstance(event_data, dict): + # Pretty print the event data + print("Event Data:") + print(json.dumps(event_data, indent=2)) + + # Special handling for message events + if 'content' in event_data: + print("\nMessage Content:", event_data['content']) + if event_data.get('tool_calls'): + print("Tool Calls:", json.dumps(event_data['tool_calls'], indent=2)) + + # Collect messages + collected_messages.append(event_data) + else: + print("Event Data:", event_data) + print("*"*80 + "\n") + + except json.JSONDecodeError as e: + print(f"Error decoding event data: {e}") + print(f"Raw data: {data}") + + except Exception as e: + print(f"Error processing stream: {e}") + import traceback + traceback.print_exc() + finally: + print("\n" + "-"*80) + print(f"Closing stream after processing {event_count} events") + print("-"*80 + "\n") + stream_response.close() + + except requests.exceptions.RequestException as e: + print(f"Request error during streaming: {e}") + import traceback + traceback.print_exc() + + end_time = datetime.now() + duration = end_time - start_time + print("\n" + "="*80) + print(f"Streaming session completed at {end_time}") + 
print(f"Total duration: {duration}") + print("="*80 + "\n") + + return collected_messages, final_state + +if __name__ == "__main__": + logger.info(f"{'*'*50}Running interactive mode{'*'*50}") + + parser = argparse.ArgumentParser() + parser.add_argument('--config', type=str, required=False, default='default_config.json', + help='Config file name under configs/') + parser.add_argument('--sample_request', type=str, required=False, default='default_example.json', + help='Sample request JSON file name under tests/sample_requests/') + parser.add_argument('--api_key', type=str, required=False, default='test', + help='API key to use for authentication') + parser.add_argument('--host', type=str, default='http://localhost:4040', + help='Host to use for the request') + parser.add_argument('--load_messages', action='store_true', + help='Load messages from sample request file') + args = parser.parse_args() + + print(f"Config file: {args.config}") + print(f"Sample request file: {args.sample_request}") + + config = read_json_from_file(f"./configs/{args.config}") + example_request = read_json_from_file(f"./tests/sample_requests/{args.sample_request}").get("lastRequest", {}) + + if args.load_messages: + messages = example_request.get("messages", []) + user_input_needed = False + else: + messages = [] + user_input_needed = True + + state = example_request.get("state", {}) + start_agent_name = example_request.get("startAgent", "") + last_agent_name = state.get("last_agent_name", "") + if not last_agent_name: + last_agent_name = start_agent_name + + logger.info("Starting main conversation loop") + start_time = None + while True: + logger.info("Loading configuration files") + + # To account for updates to state + complete_request = copy.deepcopy(example_request) + complete_request["messages"] = messages + complete_request["state"] = state + complete_request["startAgent"] = start_agent_name + + print(f"\nUsing agent: {last_agent_name}") + + if user_input_needed: + user_inp = 
input('\nUSER: ') + messages.append({ + "role": "user", + "content": user_inp + }) + if user_inp == 'exit': + logger.info("User requested exit") + break + logger.info("Added user message to conversation") + + start_time = datetime.now() + + # Preprocess messages + print("Preprocessing messages") + messages = preprocess_messages(messages) + complete_request["messages"] = preprocess_messages(complete_request["messages"]) + + # Run the streaming turn + resp_messages, resp_state = stream_chat( + host=args.host, + request_data=complete_request, + api_key=args.api_key + ) + + if resp_messages: + state = resp_state + if config.get("return_diff_messages", True): + messages.extend(resp_messages) + else: + messages = resp_messages + + user_input_needed = True + print("Quick stats") + print(f"Turn Duration: {round((datetime.now() - start_time).total_seconds() * 10) / 10:.1f}s") + print('='*50) + + print("\n" + "-" * 80) \ No newline at end of file diff --git a/rowboat_agents/tests/sample_requests/default_example.json b/rowboat_agents/tests/sample_requests/default_example.json new file mode 100644 index 000000000..8fc308c6e --- /dev/null +++ b/rowboat_agents/tests/sample_requests/default_example.json @@ -0,0 +1,235 @@ +{ + "lastRequest": { + "messages": [ + { + "content": "", + "role": "system", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + }, + { + "content": "hi", + "role": "user", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + } + ], + "state": { + "last_agent_name": "Door Dash Hub" + }, + "agents": [ + { + "name": "Door Dash Hub", + "type": "conversation", + "description": "Hub agent to manage Door Dash-related queries.", + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nYou are responsible for directing Door Dash-related queries to appropriate agents.\n\n---\n## āš™ļø Steps to Follow:\n1. 
Greet the user and ask which Door Dash-related query they need help with (e.g., 'Are you facing issues with your order items or delivery timing?').\n2. If the query matches a specific task, direct the user to the corresponding agent:\n - Order Issue → [@agent:Order Issue]\n - Delayed Delivery → [@agent:Delayed Delivery]\n3. If the query doesn't match any specific task, respond with 'I'm sorry, I didn't understand. Could you clarify your request?' or escalate to human support.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Issues with order items\n- Delayed delivery issues\n\nāŒ Out of Scope:\n- Issues unrelated to Door Dash\n- General knowledge queries\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Direct queries to specific Door Dash agents promptly.\n- Call [@agent:Escalation] agent for unrecognized queries.\n\n🚫 Don'ts:\n- Engage in detailed support.\n- Extend the conversation beyond Door Dash.\n- Provide user-facing text such as 'I will connect you now...' when calling another agent\n\n# Examples\n- **User** : I need help with my order items.\n - **Agent actions**: [@agent:Order Issue](#mention)\n\n- **User** : My delivery is delayed.\n - **Agent actions**: Call [@agent:Delayed Delivery](#mention)\n\n- **User** : I'm not sure where my order is.\n - **Agent actions**: Call [@agent:Delayed Delivery](#mention)\n\n- **User** : Can you reset my order settings?\n - **Agent actions**: [@agent:Escalation](#mention)\n\n- **User** : How are you today?\n - **Agent response**: I'm doing great. 
What would like help with today?", + "model": "gpt-4o-mini", + "hasRagSources": false, + "controlType": "retain", + "tools": [], + "prompts": [], + "connectedAgents": [ + "Order Issue", + "Delayed Delivery", + "Escalation" + ] + }, + { + "name": "Post process", + "type": "post_process", + "description": "", + "instructions": "Ensure that the agent response is terse and to the point.", + "model": "gpt-4o-mini", + "hasRagSources": false, + "controlType": "retain", + "tools": [], + "prompts": [], + "connectedAgents": [] + }, + { + "name": "Escalation", + "type": "escalation", + "description": "", + "instructions": "Get the user's contact information and let them know that their request has been escalated.", + "model": "gpt-4o-mini", + "hasRagSources": false, + "controlType": "retain", + "tools": [], + "prompts": [], + "connectedAgents": [] + }, + { + "name": "Order Issue", + "type": "conversation", + "description": "Agent to assist users with missing or incorrect order items.", + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nAssist users with issues related to missing or incorrect order items.\n\n---\n## āš™ļø Steps to Follow:\n1. Fetch the order details using the [@tool:get_order_details] tool.\n2. Confirm the issue with the user.\n3. Provide solutions or escalate if unresolved.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Handling missing or incorrect order items\n\nāŒ Out of Scope:\n- Delayed delivery issues\n- General knowledge queries\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Ensure the user is aware of the order details before proceeding.\n\n🚫 Don'ts:\n- Extend the conversation beyond order issues.\n\n# Examples\n- **User** : I received the wrong item in my order.\n - **Agent response**: I can help with that. 
Let me fetch your order details first.\n - **Agent actions**: Call [@tool:get_order_details](#mention)\n\n- **User** : My order is missing an item.\n - **Agent response**: Let's check your order details and resolve this issue.\n - **Agent actions**: Call [@tool:get_order_details](#mention)\n\n- **User** : I got someone else's order.\n - **Agent response**: I apologize for the mix-up. I'll fetch your order details to sort this out.\n - **Agent actions**: Call [@tool:get_order_details](#mention)\n\n- **User** : Can you help me with a missing item?\n - **Agent response**: Certainly, I'll look into your order details right away.\n - **Agent actions**: Call [@tool:get_order_details](#mention)\n\n- **User** : There's an issue with my order items.\n - **Agent response**: Let's verify your order details to address this issue.\n - **Agent actions**: Call [@tool:get_order_details](#mention)", + "model": "gpt-4o", + "hasRagSources": false, + "controlType": "retain", + "tools": [ + "get_order_details" + ], + "prompts": [], + "connectedAgents": [] + }, + { + "name": "Delayed Delivery", + "type": "conversation", + "description": "Agent to assist users with delayed delivery issues.", + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nAssist users with issues related to delayed delivery.\n\n---\n## āš™ļø Steps to Follow:\n1. Fetch the delivery status using the [@tool:get_delivery_status] tool.\n2. Confirm the delay with the user.\n3. Provide solutions or escalate if unresolved.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Handling delayed delivery issues\n\nāŒ Out of Scope:\n- Missing or incorrect order items\n- General knowledge queries\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Ensure the user is aware of the delivery status before proceeding.\n\n🚫 Don'ts:\n- Extend the conversation beyond delivery issues.\n\n# Examples\n- **User** : My delivery is late.\n - **Agent response**: I can help with that. 
Let me fetch your delivery status first.\n - **Agent actions**: Call [@tool:get_delivery_status](#mention)\n\n- **User** : Where is my order? It's delayed.\n - **Agent response**: Let's check your delivery status and resolve this issue.\n - **Agent actions**: Call [@tool:get_delivery_status](#mention)\n\n- **User** : My order hasn't arrived yet.\n - **Agent response**: I apologize for the delay. I'll fetch your delivery status to sort this out.\n - **Agent actions**: Call [@tool:get_delivery_status](#mention)\n\n- **User** : Can you help me with a delayed delivery?\n - **Agent response**: Certainly, I'll look into your delivery status right away.\n - **Agent actions**: Call [@tool:get_delivery_status](#mention)\n\n- **User** : There's an issue with my delivery timing.\n - **Agent response**: Let's verify your delivery status to address this issue.\n - **Agent actions**: Call [@tool:get_delivery_status](#mention)", + "model": "gpt-4o", + "hasRagSources": false, + "controlType": "retain", + "tools": [ + "get_delivery_status" + ], + "prompts": [], + "connectedAgents": [] + } + ], + "tools": [ + { + "name": "get_order_details", + "description": "Tool to fetch details about the user's order.", + "parameters": { + "type": "object", + "properties": { + "order_id": { + "type": "string", + "description": "The unique identifier for the order." + } + }, + "required": [ + "order_id" + ] + }, + "mockTool": true, + "mockInstructions": "Return a mock response for Door Dash order details." + }, + { + "name": "get_delivery_status", + "description": "Tool to fetch the current status of the delivery.", + "parameters": { + "type": "object", + "properties": { + "order_id": { + "type": "string", + "description": "The unique identifier for the order." + } + }, + "required": [ + "order_id" + ] + } + } + ], + "prompts": [ + { + "name": "Style prompt", + "type": "style_prompt", + "prompt": "You should be empathetic and helpful." 
+ } + ], + "startAgent": "Door Dash Hub" + }, + "lastResponse": { + "messages": [ + { + "annotations": [], + "content": "Hello! How can I assist you today? Are you facing issues with your order items or delivery timing?", + "created_at": "2025-03-19T12:29:06.547196", + "current_turn": true, + "response_type": "internal", + "role": "assistant", + "sender": "Door Dash Hub" + }, + { + "annotations": [], + "content": "Hi! How can I help you today? Are you having issues with your order items or delivery timing?", + "created_at": "2025-03-19T12:29:06.547196", + "current_turn": true, + "response_type": "external", + "role": "assistant", + "sender": "Door Dash Hub >> Post process" + } + ], + "state": { + "agent_data": [ + { + "child_functions": [ + "transfer_to_escalation", + "transfer_to_order_issue", + "transfer_to_delayed_delivery" + ], + "external_tools": [], + "history": [ + { + "content": "hi", + "current_turn": true, + "role": "user" + }, + { + "annotations": [], + "content": "Hello! How can I assist you today? Are you facing issues with your order items or delivery timing?", + "created_at": "2025-03-19T12:29:06.547196", + "current_turn": true, + "response_type": "internal", + "role": "assistant", + "sender": "Door Dash Hub" + } + ], + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nYou are responsible for directing Door Dash-related queries to appropriate agents.\n\n---\n## āš™ļø Steps to Follow:\n1. Greet the user and ask which Door Dash-related query they need help with (e.g., 'Are you facing issues with your order items or delivery timing?').\n2. If the query matches a specific task, direct the user to the corresponding agent:\n - Order Issue → [@agent:Order Issue]\n - Delayed Delivery → [@agent:Delayed Delivery]\n3. If the query doesn't match any specific task, respond with 'I'm sorry, I didn't understand. Could you clarify your request?' 
or escalate to human support.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Issues with order items\n- Delayed delivery issues\n\nāŒ Out of Scope:\n- Issues unrelated to Door Dash\n- General knowledge queries\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Direct queries to specific Door Dash agents promptly.\n- Call [@agent:Escalation] agent for unrecognized queries.\n\n🚫 Don'ts:\n- Engage in detailed support.\n- Extend the conversation beyond Door Dash.\n- Provide user-facing text such as 'I will connect you now...' when calling another agent\n\n# Examples\n- **User** : I need help with my order items.\n - **Agent actions**: [@agent:Order Issue](#mention)\n\n- **User** : My delivery is delayed.\n - **Agent actions**: Call [@agent:Delayed Delivery](#mention)\n\n- **User** : I'm not sure where my order is.\n - **Agent actions**: Call [@agent:Delayed Delivery](#mention)\n\n- **User** : Can you reset my order settings?\n - **Agent actions**: [@agent:Escalation](#mention)\n\n- **User** : How are you today?\n - **Agent response**: I'm doing great. 
What would like help with today?\n\n----------------------------------------------------------------------------------------------------\n\n\n# Instructions about using other specialized agents\nYou have the following specialized agents that you can transfer the chat to, using the appropriate tool calls for the transfer: \nName: Escalation\nDescription: \nTool for transfer: transfer_to_escalation\n----------------------------------------------------------------------------------------------------\nName: Order Issue\nDescription: Agent to assist users with missing or incorrect order items.\nTool for transfer: transfer_to_order_issue\n----------------------------------------------------------------------------------------------------\nName: Delayed Delivery\nDescription: Agent to assist users with delayed delivery issues.\nTool for transfer: transfer_to_delayed_delivery\n\n## Notes:\n- Transfer the chat to the appropriate agent, based on the chat history and / or the user's request.\n- When you transfer the chat to another agent, you should not provide any response to the user. For example, do not say 'Transferring chat to X agent' or anything like that. Just invoke the tool call to transfer to the other agent.\n- Do NOT ever mention the existence of other agents. For example, do not say 'Please check with X agent for details regarding processing times.' or anything like that.\n- If any other agent transfers the chat to you without responding to the user, it means that they don't know how to help. Do not transfer the chat to back to the same agent in this case. In such cases, you should transfer to the escalation agent using the appropriate tool call. 
Never ask the user to contact support.\n\n\n----------------------------------------------------------------------------------------------------\n\n\n# Additional System-Wide Context or Instructions:\n\n", + "internal_tools": [], + "most_recent_parent_name": "", + "name": "Door Dash Hub", + "parent_function": null + }, + { + "child_functions": [], + "external_tools": [], + "history": [], + "instructions": "Get the user's contact information and let them know that their request has been escalated.\n\n----------------------------------------------------------------------------------------------------\n\n\n# Additional System-Wide Context or Instructions:\n\n", + "internal_tools": [], + "most_recent_parent_name": "", + "name": "Escalation", + "parent_function": null + }, + { + "child_functions": [], + "external_tools": [ + "get_order_details" + ], + "history": [], + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nAssist users with issues related to missing or incorrect order items.\n\n---\n## āš™ļø Steps to Follow:\n1. Fetch the order details using the [@tool:get_order_details] tool.\n2. Confirm the issue with the user.\n3. Provide solutions or escalate if unresolved.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Handling missing or incorrect order items\n\nāŒ Out of Scope:\n- Delayed delivery issues\n- General knowledge queries\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Ensure the user is aware of the order details before proceeding.\n\n🚫 Don'ts:\n- Extend the conversation beyond order issues.\n\n# Examples\n- **User** : I received the wrong item in my order.\n - **Agent response**: I can help with that. 
Let me fetch your order details first.\n - **Agent actions**: Call [@tool:get_order_details](#mention)\n\n- **User** : My order is missing an item.\n - **Agent response**: Let's check your order details and resolve this issue.\n - **Agent actions**: Call [@tool:get_order_details](#mention)\n\n- **User** : I got someone else's order.\n - **Agent response**: I apologize for the mix-up. I'll fetch your order details to sort this out.\n - **Agent actions**: Call [@tool:get_order_details](#mention)\n\n- **User** : Can you help me with a missing item?\n - **Agent response**: Certainly, I'll look into your order details right away.\n - **Agent actions**: Call [@tool:get_order_details](#mention)\n\n- **User** : There's an issue with my order items.\n - **Agent response**: Let's verify your order details to address this issue.\n - **Agent actions**: Call [@tool:get_order_details](#mention)\n\n----------------------------------------------------------------------------------------------------\n\n\n# Instructions about giving up chat control\nIf you are unable to handle the chat (e.g. if it is not in your scope of instructions), you should use the tool call provided to give up control of the chat.\nTool for transfer: give_up_chat_control\n\n## Notes:\n- When you give up control of the chat, you should not provide any response to the user. Just invoke the tool call to give up control.\n\n\n----------------------------------------------------------------------------------------------------\n\n\n# Additional System-Wide Context or Instructions:\n\n", + "internal_tools": [], + "most_recent_parent_name": "", + "name": "Order Issue", + "parent_function": null + }, + { + "child_functions": [], + "external_tools": [ + "get_delivery_status" + ], + "history": [], + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nAssist users with issues related to delayed delivery.\n\n---\n## āš™ļø Steps to Follow:\n1. Fetch the delivery status using the [@tool:get_delivery_status] tool.\n2. 
Confirm the delay with the user.\n3. Provide solutions or escalate if unresolved.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Handling delayed delivery issues\n\nāŒ Out of Scope:\n- Missing or incorrect order items\n- General knowledge queries\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Ensure the user is aware of the delivery status before proceeding.\n\n🚫 Don'ts:\n- Extend the conversation beyond delivery issues.\n\n# Examples\n- **User** : My delivery is late.\n - **Agent response**: I can help with that. Let me fetch your delivery status first.\n - **Agent actions**: Call [@tool:get_delivery_status](#mention)\n\n- **User** : Where is my order? It's delayed.\n - **Agent response**: Let's check your delivery status and resolve this issue.\n - **Agent actions**: Call [@tool:get_delivery_status](#mention)\n\n- **User** : My order hasn't arrived yet.\n - **Agent response**: I apologize for the delay. I'll fetch your delivery status to sort this out.\n - **Agent actions**: Call [@tool:get_delivery_status](#mention)\n\n- **User** : Can you help me with a delayed delivery?\n - **Agent response**: Certainly, I'll look into your delivery status right away.\n - **Agent actions**: Call [@tool:get_delivery_status](#mention)\n\n- **User** : There's an issue with my delivery timing.\n - **Agent response**: Let's verify your delivery status to address this issue.\n - **Agent actions**: Call [@tool:get_delivery_status](#mention)\n\n----------------------------------------------------------------------------------------------------\n\n\n# Instructions about giving up chat control\nIf you are unable to handle the chat (e.g. if it is not in your scope of instructions), you should use the tool call provided to give up control of the chat.\nTool for transfer: give_up_chat_control\n\n## Notes:\n- When you give up control of the chat, you should not provide any response to the user. 
Just invoke the tool call to give up control.\n\n\n----------------------------------------------------------------------------------------------------\n\n\n# Additional System-Wide Context or Instructions:\n\n", + "internal_tools": [], + "most_recent_parent_name": "", + "name": "Delayed Delivery", + "parent_function": null + } + ], + "last_agent_name": "Door Dash Hub" + }, + "tokens_used": { + "openai/gpt-4o-mini": { + "input_tokens": 1731, + "output_tokens": 45 + } + } + } +} \ No newline at end of file diff --git a/rowboat_agents/tests/sample_requests/example1.json b/rowboat_agents/tests/sample_requests/example1.json new file mode 100644 index 000000000..88a07eae1 --- /dev/null +++ b/rowboat_agents/tests/sample_requests/example1.json @@ -0,0 +1,424 @@ +{ + "lastRequest": { + "messages": [ + { + "content": "hi", + "role": "user", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + }, + { + "content": "Hello! How can I assist you today with your XYZ Bike?", + "role": "assistant", + "sender": "Main agent", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" + }, + { + "content": "Hello! 
How can I assist you today with your XYZ Bike?", + "role": "assistant", + "sender": "Main agent >> Post process", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "external" + }, + { + "content": "i want to know about the range", + "role": "user", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + }, + { + "content": null, + "role": "assistant", + "sender": "Main agent", + "tool_calls": [ + { + "function": { + "arguments": "{\"args\":\"\",\"kwargs\":\"\"}", + "name": "transfer_to_product_info_agent" + }, + "id": "call_0MJHin0XCMyEJjA7T2FTJLZL", + "type": "function" + } + ], + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" + }, + { + "content": "{\"assistant\": \"Product info agent\"}", + "role": "tool", + "sender": null, + "tool_calls": null, + "tool_call_id": "call_0MJHin0XCMyEJjA7T2FTJLZL", + "tool_name": "transfer_to_product_info_agent" + }, + { + "content": null, + "role": "assistant", + "sender": "Product info agent", + "tool_calls": [ + { + "function": { + "arguments": "{\"question\":\"XYZ Bike travel range\"}", + "name": "getArticleInfo" + }, + "id": "call_CcNzb2N3lBt4JOCVrzyHdpdL", + "type": "function" + } + ], + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" + }, + { + "content": "{\"results\":[{\"title\":\"XYZ Electric Bike\",\"content\":\"# XYZ Electric Bike\\n\\n### Transforming Transportation with the XYZ Electric Bike ### Revolutionizing Urban Mobility XYZ Electric Bike reimagines how we navigate cities, offering a seamless, stress-free alternative to traffic jams, pricey rideshares, rigid schedules, and the hassle of finding parking. --- #### **Instant Foldability** With a single press, XYZ's proprietary hinge mechanism folds the bike smoothly and securely in one swift motion. This innovation makes carrying and storing the bike effortless—outperforming the competition in both speed and ease of use. 
--- #### **Exceptional Handlebars** The sleek magnesium alloy handlebars are a marvel of design, housing intuitive controls for acceleration, braking, the horn, and LED lights, all within a streamlined, wire-free structure. Magnesium's lightweight properties—33% lighter than aluminum—make XYZ one of the most portable electric bikes available. --- #### **Unmatched Frame Design** Crafted with precision using TORAY carbon fiber, the frame achieves the perfect balance between strength and minimal weight. The material, meticulously layered for durability, is the same advanced composite used in aerospace engineering. --- #### **Impressive Range** Powered by premium electric batteries, XYZ bikes are designed for extended use with fast charging times. Their energy management system ensures long-lasting performance, providing ranges of up to 25 miles per charge, depending on riding conditions. --- #### **Dynamic Power** Equipped with dual motors delivering up to 1,000 watts at peak output, XYZ effortlessly handles steep inclines and challenging terrains. Rare-earth magnets and thermal regulation technology ensure high efficiency and reliability. --- #### **Puncture-Proof Tires** Say goodbye to flat tires. XYZ's solid rubber tires incorporate innovative air pockets for built-in shock absorption, delivering a smooth yet responsive ride across various surfaces. --- #### **Advanced Braking System** XYZ's braking system combines electronic anti-lock functionality with a user-friendly friction brake. Riders can enjoy a customizable braking experience, whether relying on fingertip controls or a traditional foot brake. --- #### **Durable and Comfortable Deck** The single-piece aluminum deck integrates a silicon surface for superior grip, eliminating unnecessary bulk or harsh finishes for a clean, modern look. --- #### **Invisible Kickstand** XYZ's custom-designed kickstand is seamlessly integrated, providing stability without disrupting the bike's sleek aesthetics. 
--- ### Models Comparison #### **XYZ Classic** - Price: $990 - Range: Up to 12 miles - Charge Time: 3.5 hours (80%) - Weight: 28.5 lbs #### **XYZ Voyager** - Price: $1,490 - Range: Up to 25 miles - Charge Time: 2 hours (80%) - Weight: 29.6 lbs - Features: App integration for enhanced control and ride stats --- XYZ Electric Bike is not just a mode of transport—it's the future of urban mobility, combining cutting-edge technology, top-tier materials, and unparalleled design for a ride that's as stylish as it is functional.\"}]}", + "role": "tool", + "sender": null, + "tool_calls": null, + "tool_call_id": "call_CcNzb2N3lBt4JOCVrzyHdpdL", + "tool_name": "getArticleInfo" + } + ], + "state": { + "agent_data": [ + { + "child_functions": [ + "transfer_to_product_info_agent", + "transfer_to_delivery_info_agent", + "transfer_to_subscriptions_agent" + ], + "external_tools": [], + "history": [ + { + "content": "hi", + "current_turn": false, + "role": "user" + }, + { + "content": "Hello! How can I assist you today with your XYZ Bike?", + "created_at": "2024-12-18T07:45:03.670088", + "current_turn": false, + "response_type": "internal", + "role": "assistant", + "sender": "Main agent" + }, + { + "content": "i want to know about the range", + "current_turn": true, + "role": "user" + }, + { + "created_at": "2024-12-18T07:45:13.240846", + "current_turn": true, + "response_type": "internal", + "role": "assistant", + "sender": "Main agent", + "tool_calls": [ + { + "function": { + "arguments": "{\"args\":\"\",\"kwargs\":\"\"}", + "name": "transfer_to_product_info_agent" + }, + "id": "call_0MJHin0XCMyEJjA7T2FTJLZL", + "type": "function" + } + ] + }, + { + "content": "{\"assistant\": \"Product info agent\"}", + "created_at": "2024-12-18T07:45:13.241184", + "current_turn": true, + "response_type": "internal", + "role": "tool", + "tool_call_id": "call_0MJHin0XCMyEJjA7T2FTJLZL", + "tool_name": "transfer_to_product_info_agent" + }, + { + "created_at": "2024-12-18T07:45:13.821351", + 
"current_turn": true, + "response_type": "internal", + "role": "assistant", + "sender": "Product info agent", + "tool_calls": [ + { + "function": { + "arguments": "{\"question\":\"XYZ Bike travel range\"}", + "name": "getArticleInfo" + }, + "id": "call_CcNzb2N3lBt4JOCVrzyHdpdL", + "type": "function" + } + ] + } + ], + "instructions": "Role:\nYou are a customer support agent for XYZ Bikes. Your primary task is to facilitate conversations by passing control to specialized worker agents when needed.\n\n---\n\nTasks to Follow:\n- Engage in small talk if no specific question is asked.\n- Pass control to the appropriate worker agents for specialized conversations.\n\n---\n\nSmall Talk:\nYou are welcome to engage in basic small talk to build rapport.\n\n---\n\nExamples:\n\n---\nIn Scope Example 1:\nUser: How are you?\nAnswer: \"I'm doing well, thank you! How can I assist you today?\"\n\n---\nIn Scope Example 2:\nUser: What can you do?\nAnswer: \"I can help with customer support-related issues for XYZ Bikes. 
Let me know if you have any questions.\"\n\n---\nIn Scope Example 3:\nUser: I want a XYZ Bike.\nAnswer: \"What would you like to know about XYZ Bikes?\"\n\n---\nPass Control Example 1:\nUser: Tell me about the product features.\nAction: Pass control to the Product info agent.\n\n---\nPass Control Example 2:\nUser: Where is my scooter?\nAction: Pass control to the Delivery info agent.\n\n---\nPass Control Example 3:\nUser: I need help with my return.\nAction: Pass control to the Returns agent.\n\n---\nPass Control Example 4:\nUser: How does the Unagi subscription work?\nAction: Pass control to the Subscriptions agent.\n\n---\nāœ… Dos:\n- Engage in small talk when necessary.\n- Pass control to the appropriate agent based on the user's query.\n\n---\nāŒ Don'ts:\n- Do not focus excessively on greetings during ongoing conversations.\n- Do not continue the conversation if you suspect the user is confused or uninterested in Unagi support.\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "internal_tools": [], + "most_recent_parent_name": "", + "name": "Main agent", + "parent_function": null + }, + { + "child_functions": [], + "external_tools": [ + "getArticleInfo" + ], + "history": [ + { + "content": "i want to know about the range", + "current_turn": true, + "role": "user" + }, + { + "created_at": "2024-12-18T07:45:13.821351", + "current_turn": true, + "response_type": "internal", + "role": "assistant", + "sender": "Product info agent", + "tool_calls": [ + { + "function": { + "arguments": "{\"question\":\"XYZ Bike travel range\"}", + "name": "getArticleInfo" + }, + "id": "call_CcNzb2N3lBt4JOCVrzyHdpdL", + "type": "function" + } + ] + } + ], + "instructions": "šŸ§‘ā€šŸ’¼ Role:\nYou are a product information agent for XYZ Bikes. 
Your job is to answer search for the right article and answer questions strictly based on the article about Unagi products. Feel free to ask the user clarification questions if needed.\n\n---\n\nšŸ“œ Instructions:\n\nāœ… In Scope:\n- Answer questions strictly about Unagi product information.\n\nāŒ Out of Scope:\n- Questions about delivery, returns, subscriptions, and promotions.\n- Any topic unrelated to Unagi products.\n- If a question is out of scope, call give_up_control and do not attempt to answer it.\n\n---\n\nāœ”ļø Dos:\n- Stick to the facts provided in the articles.\n- Provide complete and direct answers to the user's questions.\n- Call the Greeting agent after each interaction.\n\n---\n\n🚫 Don'ts:\n- Do not partially answer questions or direct users to a URL for more information.\n- Do not provide information outside of the given context.\n\n---\n\nšŸ“ Examples:\n\n---\nIn Scope Example 1:\nUser: What is the maximum speed of the Unagi E500?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 2:\nUser: How long does it take to charge a XYZ Bike fully?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 3:\nUser: Can you tell me about the weight-carrying capacity of XYZ Bikes?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 4:\nUser: What are the differences between the E250 and E500 models?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 5:\nUser: How far can I travel on a single charge with the E500?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 6:\nUser: Is the scooter waterproof?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 7:\nUser: Does the scooter have any safety features?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 8:\nUser: What materials are used to make XYZ Bikes?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 9:\nUser: Can the scooter be used off-road?\nAction: Call 
get_article_info followed by .\n\n---\nIn Scope Example 10:\nUser: Are spare parts available for purchase?\nAction: Call get_article_info followed by .\n\n---\nOut of Scope Example 1:\nUser: What is the status of my order delivery?\nAction: Call give_up_control.\n\n---\nOut of Scope Example 2:\nUser: How do I process a return?\nAction: Call give_up_control.\n\n---\nOut of Scope Example 3:\nUser: Can you tell me more about the subscription plans?\nAction: Call give_up_control.\n\n---\nOut of Scope Example 4:\nUser: Are there any promotions or discounts?\nAction: Call give_up_control.\n\n---\nOut of Scope Example 5:\nUser: Who won the last election?\nAction: Call give_up_control.\n\nProvide your output in the following structured JSON format:\n\n{\n \"steps_completed\": ,\n \"current_step\": ,\n \"reasoning\": \"\",\n \"error_count\": ,\n \"response_to_user\": \"\"\n}\n\nAlways ensure that all pertinent details, including tables or structured lists, are contained within the response_to_user field to maintain clarity and a comprehensive response for the user.\n\nRetrieval instructions:\n\nIn every turn, retrieve a relevant article and use the information from that article to answer the user's question.\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "internal_tools": [], + "most_recent_parent_name": "Main agent", + "name": "Product info agent", + "parent_function": "give_up_chat_control" + }, + { + "child_functions": [], + "external_tools": [ + "get_delivery_details", + "getArticleInfo" + ], + "history": [], + "instructions": "Role:\nYou are responsible for providing delivery information to the user.\n\n---\n\nāš™ļø Steps to Follow:\n1. Fetch the delivery details using the function: get_shipping_details.\n2. 
Answer the user's question based on the fetched delivery details.\n3. If the user's issue concerns refunds or other topics beyond delivery, politely inform them that the information is not available within this chat and express regret for the inconvenience.\n\n---\n\nāœ… In Scope:\nQuestions about delivery status, shipping timelines, and delivery processes.\nGeneric delivery/shipping-related questions where answers can be sourced from articles.\n\n---\n\nāŒ Out of Scope:\nQuestions unrelated to delivery or shipping.\nQuestions about products features, returns, subscriptions, or promotions.\nIf a question is out of scope, politely inform the user and avoid providing an answer.\n\n---\n\nExample 1:\nUser: What is the status of my delivery?\nAction: Call get_delivery_details to fetch the current delivery status and inform the user.\n\nExample 2:\nUser: Can you explain the delivery process?\nAction: Provide a detailed answer and clarify any user questions based on the articles.\n\nExample 3:\nUser: I have a question about product features such as range, durability etc.\nAction: give_up_control as this is not in your scope.\n\n---\n\nāœ… Dos:\nUse get_shipping_details to fetch accurate delivery information.\nProvide complete and clear answers based on the delivery details.\nFor generic delivery questions, refer to relevant articles if necessary.\nStick to factual information when answering.\n\n---\n\nāŒ Don'ts:\nDo not provide answers without fetching delivery details when required.\nDo not leave the user with partial information.\nRefrain from phrases like 'please contact support'; instead, relay information limitations gracefully.\n\nProvide your output in the following structured JSON format:\n\n{\n \"steps_completed\": ,\n \"current_step\": ,\n \"reasoning\": \"\",\n \"error_count\": ,\n \"response_to_user\": \"\"\n}\n\nAlways ensure that all pertinent details, including tables or structured lists, are contained within the response_to_user field to maintain 
clarity and a comprehensive response for the user.\n\nRetrieval instructions:\n\nIn every turn, retrieve a relevant article and use the information from that article to answer the user's question.\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "internal_tools": [], + "most_recent_parent_name": "", + "name": "Delivery info agent", + "parent_function": null + }, + { + "child_functions": [], + "external_tools": [], + "history": [], + "instructions": "talk about returns\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "internal_tools": [], + "most_recent_parent_name": "", + "name": "Returns agent", + "parent_function": null + }, + { + "child_functions": [], + "external_tools": [], + "history": [], + "instructions": "talk about subscriptions\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "internal_tools": [], + "most_recent_parent_name": "", + "name": "Subscriptions agent", + "parent_function": null + }, + { + "child_functions": [], + "external_tools": [], + "history": [], + "instructions": "Talk about promotions\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. 
It can say 'I apologize, but I don't have the right information'.", + "internal_tools": [], + "most_recent_parent_name": "", + "name": "Promotions agent", + "parent_function": null + }, + { + "child_functions": [], + "external_tools": [], + "history": [], + "instructions": "Role:\nYou are a test agent for XYZ Bikes. Your job is to help test the functionality of different operations within the system.\n\n---\n\nTasks to Follow:\n- Assist in simulating various scenarios and operations to ensure smooth functioning.\n- Report any discrepancies or issues observed during testing.\n\n---\n\nIn Scope:\n- Conduct user interaction tests.\n- Evaluate agent response accuracy.\n- Validate agent transition accuracy.\n\n---\n\nOut of Scope:\n- Direct customer interactions outside of test scenarios.\n- Handling of live customer support queries.\n\n---\n\nDos:\n- Conduct comprehensive tests to cover all expected operations and scenarios.\n- Document test outcomes clearly.\n\n---\n\nDon'ts:\n- Do not intervene in live interactions unless part of a test scenario.\n- Ensure test operations do not affect live customer service functions.\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "internal_tools": [], + "most_recent_parent_name": "", + "name": "Test agent", + "parent_function": null + } + ], + "last_agent_name": "Product info agent" + }, + "agents": [ + { + "name": "Main agent", + "type": "conversation", + "description": "The Main agent orchestrates interactions between various specialized worker agents to ensure efficient handling of user queries and support needs.", + "instructions": "Role:\nYou are a customer support agent for XYZ Bikes. 
Your primary task is to facilitate conversations by passing control to specialized worker agents when needed.\n\n---\n\nTasks to Follow:\n- Engage in small talk if no specific question is asked.\n- Pass control to the appropriate worker agents for specialized conversations.\n\n---\n\nSmall Talk:\nYou are welcome to engage in basic small talk to build rapport.\n\n---\n\nExamples:\n\n---\nIn Scope Example 1:\nUser: How are you?\nAnswer: \"I'm doing well, thank you! How can I assist you today?\"\n\n---\nIn Scope Example 2:\nUser: What can you do?\nAnswer: \"I can help with customer support-related issues for XYZ Bikes. Let me know if you have any questions.\"\n\n---\nIn Scope Example 3:\nUser: I want a XYZ Bike.\nAnswer: \"What would you like to know about XYZ Bikes?\"\n\n---\nPass Control Example 1:\nUser: Tell me about the product features.\nAction: Pass control to the Product info agent.\n\n---\nPass Control Example 2:\nUser: Where is my scooter?\nAction: Pass control to the Delivery info agent.\n\n---\nPass Control Example 3:\nUser: I need help with my return.\nAction: Pass control to the Returns agent.\n\n---\nPass Control Example 4:\nUser: How does the Unagi subscription work?\nAction: Pass control to the Subscriptions agent.\n\n---\nāœ… Dos:\n- Engage in small talk when necessary.\n- Pass control to the appropriate agent based on the user's query.\n\n---\nāŒ Don'ts:\n- Do not focus excessively on greetings during ongoing conversations.\n- Do not continue the conversation if you suspect the user is confused or uninterested in Unagi support.\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. 
It can say 'I apologize, but I don't have the right information'.", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [ + "Product info agent", + "Delivery info agent", + "Subscriptions agent" + ], + "controlType": "retain" + }, + { + "name": "Post process", + "type": "post_process", + "instructions": "- Extract the response_to_user field from the provided structured JSON and ensure that this is the only content you use for the final output.\n- Ensure that the agent response covers all the details the user asked for.\n- When providing long details, use bullets to distinguish the different points. \n- Focus specifically on the response_to_user field in its input.\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "tools": [], + "model": "gpt-4o", + "hasRagSources": false, + "connectedAgents": [] + }, + { + "name": "Product info agent", + "type": "conversation", + "description": "You assist with product-related questions by retrieving relevant articles and information.", + "instructions": "šŸ§‘ā€šŸ’¼ Role:\nYou are a product information agent for XYZ Bikes. Your job is to answer search for the right article and answer questions strictly based on the article about Unagi products. 
Feel free to ask the user clarification questions if needed.\n\n---\n\nšŸ“œ Instructions:\n\nāœ… In Scope:\n- Answer questions strictly about Unagi product information.\n\nāŒ Out of Scope:\n- Questions about delivery, returns, subscriptions, and promotions.\n- Any topic unrelated to Unagi products.\n- If a question is out of scope, call give_up_control and do not attempt to answer it.\n\n---\n\nāœ”ļø Dos:\n- Stick to the facts provided in the articles.\n- Provide complete and direct answers to the user's questions.\n- Call the Greeting agent after each interaction.\n\n---\n\n🚫 Don’ts:\n- Do not partially answer questions or direct users to a URL for more information.\n- Do not provide information outside of the given context.\n\n---\n\nšŸ“ Examples:\n\n---\nIn Scope Example 1:\nUser: What is the maximum speed of the Unagi E500?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 2:\nUser: How long does it take to charge a XYZ Bike fully?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 3:\nUser: Can you tell me about the weight-carrying capacity of XYZ Bikes?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 4:\nUser: What are the differences between the E250 and E500 models?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 5:\nUser: How far can I travel on a single charge with the E500?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 6:\nUser: Is the scooter waterproof?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 7:\nUser: Does the scooter have any safety features?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 8:\nUser: What materials are used to make XYZ Bikes?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 9:\nUser: Can the scooter be used off-road?\nAction: Call get_article_info followed by .\n\n---\nIn Scope Example 10:\nUser: Are spare parts available for purchase?\nAction: Call 
get_article_info followed by .\n\n---\nOut of Scope Example 1:\nUser: What is the status of my order delivery?\nAction: Call give_up_control.\n\n---\nOut of Scope Example 2:\nUser: How do I process a return?\nAction: Call give_up_control.\n\n---\nOut of Scope Example 3:\nUser: Can you tell me more about the subscription plans?\nAction: Call give_up_control.\n\n---\nOut of Scope Example 4:\nUser: Are there any promotions or discounts?\nAction: Call give_up_control.\n\n---\nOut of Scope Example 5:\nUser: Who won the last election?\nAction: Call give_up_control.\n\nProvide your output in the following structured JSON format:\n\n{\n \"steps_completed\": ,\n \"current_step\": ,\n \"reasoning\": \"\",\n \"error_count\": ,\n \"response_to_user\": \"\"\n}\n\nAlways ensure that all pertinent details, including tables or structured lists, are contained within the response_to_user field to maintain clarity and a comprehensive response for the user.\n\nRetrieval instructions:\n\nIn every turn, retrieve a relevant article and use the information from that article to answer the user's question.\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": true, + "connectedAgents": [], + "controlType": "relinquish_to_parent" + }, + { + "name": "Delivery info agent", + "type": "conversation", + "description": "You are responsible for providing accurate delivery status and shipping details for orders.", + "instructions": "Role:\nYou are responsible for providing delivery information to the user.\n\n---\n\nāš™ļø Steps to Follow:\n1. Fetch the delivery details using the function: get_shipping_details.\n2. Answer the user's question based on the fetched delivery details.\n3. 
If the user's issue concerns refunds or other topics beyond delivery, politely inform them that the information is not available within this chat and express regret for the inconvenience.\n\n---\n\nāœ… In Scope:\nQuestions about delivery status, shipping timelines, and delivery processes.\nGeneric delivery/shipping-related questions where answers can be sourced from articles.\n\n---\n\nāŒ Out of Scope:\nQuestions unrelated to delivery or shipping.\nQuestions about products features, returns, subscriptions, or promotions.\nIf a question is out of scope, politely inform the user and avoid providing an answer.\n\n---\n\nExample 1:\nUser: What is the status of my delivery?\nAction: Call get_delivery_details to fetch the current delivery status and inform the user.\n\nExample 2:\nUser: Can you explain the delivery process?\nAction: Provide a detailed answer and clarify any user questions based on the articles.\n\nExample 3:\nUser: I have a question about product features such as range, durability etc.\nAction: give_up_control as this is not in your scope.\n\n---\n\nāœ… Dos:\nUse get_shipping_details to fetch accurate delivery information.\nProvide complete and clear answers based on the delivery details.\nFor generic delivery questions, refer to relevant articles if necessary.\nStick to factual information when answering.\n\n---\n\nāŒ Don’ts:\nDo not provide answers without fetching delivery details when required.\nDo not leave the user with partial information.\nRefrain from phrases like 'please contact support'; instead, relay information limitations gracefully.\n\nProvide your output in the following structured JSON format:\n\n{\n \"steps_completed\": ,\n \"current_step\": ,\n \"reasoning\": \"\",\n \"error_count\": ,\n \"response_to_user\": \"\"\n}\n\nAlways ensure that all pertinent details, including tables or structured lists, are contained within the response_to_user field to maintain clarity and a comprehensive response for the user.\n\nRetrieval 
instructions:\n\nIn every turn, retrieve a relevant article and use the information from that article to answer the user's question.\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "tools": [ + "get_delivery_details" + ], + "model": "gpt-4o-mini", + "hasRagSources": true, + "connectedAgents": [], + "controlType": "retain" + }, + { + "name": "Returns agent", + "type": "conversation", + "description": "You provide assistance for inquiries and processes related to product returns.", + "instructions": "talk about returns\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [] + }, + { + "name": "Subscriptions agent", + "type": "conversation", + "description": "You handle all subscription-related queries from customers.", + "instructions": "talk about subscriptions\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'.", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [] + }, + { + "name": "Promotions agent", + "type": "conversation", + "description": "You provide current promotions and discounts details to the customers.", + "instructions": "Talk about promotions\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. 
It can say 'I apologize, but I don't have the right information'.", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [] + }, + { + "name": "Test agent", + "type": "conversation", + "description": "Your job is to simulate various customer interactions and test system operations for quality assurance purposes.", + "instructions": "Role:\nYou are a test agent for XYZ Bikes. Your job is to help test the functionality of different operations within the system.\n\n---\n\nTasks to Follow:\n- Assist in simulating various scenarios and operations to ensure smooth functioning.\n- Report any discrepancies or issues observed during testing.\n\n---\n\nIn Scope:\n- Conduct user interaction tests.\n- Evaluate agent response accuracy.\n- Validate agent transition accuracy.\n\n---\n\nOut of Scope:\n- Direct customer interactions outside of test scenarios.\n- Handling of live customer support queries.\n\n---\n\nDos:\n- Conduct comprehensive tests to cover all expected operations and scenarios.\n- Document test outcomes clearly.\n\n---\n\nDon’ts:\n- Do not intervene in live interactions unless part of a test scenario.\n- Ensure test operations do not affect live customer service functions.\n\nSelf Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. 
It can say 'I apologize, but I don't have the right information'.", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [] + }, + { + "name": "Escalation", + "type": "escalation", + "description": "", + "instructions": "Get the user's contact information and let them know that their request has been escalated.\n\n", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [], + "controlType": "retain" + } + ], + "tools": [ + { + "name": "get_delivery_details", + "description": "Return a estimated delivery date for the XYZ Bike.", + "parameters": { + "type": "object", + "properties": {}, + "required": [] + } + }, + { + "name": "get_subscription_plan_details", + "description": "Return details of the available subscription plans for XYZ Bikes.", + "parameters": { + "type": "object", + "properties": {}, + "required": [] + } + }, + { + "name": "get_current_date", + "description": "Return the current date.", + "parameters": { + "type": "object", + "properties": {}, + "required": [] + } + } + ], + "prompts": [ + { + "name": "Style prompt", + "type": "style_prompt", + "prompt": "You should be empathetic and helpful." + }, + { + "name": "reasoning_output", + "type": "base_prompt", + "prompt": "Give your output in the following format:\n\nreason : \n\nresponse_to_user : " + }, + { + "name": "get_delivery_details", + "type": "base_prompt", + "prompt": "Return a estimated delivery date for XYZ Bike." + }, + { + "name": "structured_output", + "type": "base_prompt", + "prompt": "Provide your output in the following structured JSON format:\n\n{\n \"steps_completed\": ,\n \"current_step\": ,\n \"reasoning\": \"\",\n \"error_count\": ,\n \"response_to_user\": \"\"\n}\n\nAlways ensure that all pertinent details, including tables or structured lists, are contained within the response_to_user field to maintain clarity and a comprehensive response for the user." 
+ }, + { + "name": "rag_article_prompt", + "type": "base_prompt", + "prompt": "Retrieval instructions:\n\nIn every turn, retrieve a relevant article and use the information from that article to answer the user's question." + }, + { + "name": "self_support_prompt", + "type": "base_prompt", + "prompt": "Self Support Guidance:\n\nThe bot should not suggest phrases like 'let me connect you to support' or 'you can reach out to support'. Instead, the agent is the customer support. It can say 'I apologize, but I don't have the right information'." + } + ], + "startAgent": "Main agent" + } +} \ No newline at end of file diff --git a/rowboat_agents/tests/sample_requests/example2.json b/rowboat_agents/tests/sample_requests/example2.json new file mode 100644 index 000000000..8a8058bac --- /dev/null +++ b/rowboat_agents/tests/sample_requests/example2.json @@ -0,0 +1,121 @@ +{ + "lastRequest": { + "messages": [ + { + "content": "hi", + "role": "user", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + } + ], + "state": { + "last_agent_name": "Cashout Agent" + }, + "agents": [ + { + "name": "Guardrails", + "type": "guardrails", + "description": "", + "instructions": "Stick to the facts and do not make any assumptions.\n\n", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [], + "controlType": "retain" + }, + { + "name": "Post process", + "type": "post_process", + "description": "", + "instructions": "Ensure that the agent response is terse and to the point.\n\n", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [], + "controlType": "retain" + }, + { + "name": "Escalation", + "type": "escalation", + "description": "", + "instructions": "Get the user's contact information and let them know that their request has been escalated.\n\n", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [], + "controlType": "retain" + }, + { + "name": 
"Cashout Agent", + "type": "conversation", + "description": "Responsible for handling user cashouts.", + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\n\nYou are responsible for assisting users with cashing out their balance.\n\n---\n\n## āš™ļø Steps to Follow:\n\n1. Check if any 'no-cashout' flag is set for the user using the function: check_no_cashout_flag.\n2. Retrieve the user balance using the function: get_user_balance.\n3. If the balance is greater than zero, provide the user with the steps to cash out:\n - Step 1: Navigate to the 'Cashout' section in the application.\n - Step 2: Select the amount you wish to cash out.\n - Step 3: Confirm the transaction details.\n - Step 4: Submit the request and wait for confirmation.\n4. If the balance is zero or negative, pass control to the Crypto Cashout agent.\n\n---\n\n## šŸŽÆ Scope:\n\nāœ… In Scope:\n- Checking user-specific flags related to cashout.\n- Providing steps for cashing out when balance permits.\n\nāŒ Out of Scope:\n- Detailed financial advice beyond cashing out.\n\n---\n\n## šŸ“‹ Guidelines:\n\nāœ”ļø Dos:\n- Use check_no_cashout_flag and get_user_balance functions appropriately.\n- Provide clear cashout instructions when conditions are met.\n\n🚫 Don'ts:\n- Do not proceed with cashout steps if 'no-cashout' flag is present.\n\n---\n\n", + "tools": [ + "check_no_cashout_flag", + "get_user_balance" + ], + "model": "gpt-4o", + "hasRagSources": false, + "connectedAgents": [ + "Crypto Cashout Agent" + ], + "controlType": "relinquish_to_parent" + }, + { + "name": "Crypto Cashout Agent", + "type": "conversation", + "description": "Handles scenarios where the cashout balance is zero or negative and considers cryptocurrency options.", + "instructions": "This is a dummy agent for handling cashout through cryptocurrency when the balance is insufficient.\n\n", + "tools": [], + "model": "gpt-4o", + "hasRagSources": false, + "connectedAgents": [], + "controlType": "retain" + } + ], + "tools": [ + { + "name": 
"check_no_cashout_flag", + "description": "Function to check if the user has a 'no-cashout' flag.", + "parameters": { + "type": "object", + "properties": { + "userId": { + "type": "string", + "description": "The ID of the user to check the cashout flag for." + } + }, + "required": [ + "userId" + ] + } + }, + { + "name": "get_user_balance", + "description": "Function to retrieve the user's current balance.", + "parameters": { + "type": "object", + "properties": { + "userId": { + "type": "string", + "description": "The ID of the user to retrieve the balance for." + } + }, + "required": [ + "userId" + ] + } + } + ], + "prompts": [ + { + "name": "Style prompt", + "type": "style_prompt", + "prompt": "You should be empathetic and helpful." + } + ], + "startAgent": "Cashout Agent" + } +} \ No newline at end of file diff --git a/rowboat_agents/tests/sample_requests/example3.json b/rowboat_agents/tests/sample_requests/example3.json new file mode 100644 index 000000000..112d1a657 --- /dev/null +++ b/rowboat_agents/tests/sample_requests/example3.json @@ -0,0 +1,94 @@ +{ + "lastRequest": { + "messages": [ + { + "content": "please call the mocked tool", + "role": "user", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + } + ], + "state": { + "last_agent_name": "Example Agent" + }, + "agents": [ + { + "name": "Example Agent", + "type": "conversation", + "description": "", + "instructions": "## šŸ§‘ā€ Role:\nYou are an helpful customer support assistant\n\n---\n## āš™ļø Steps to Follow:\n1. Ask the user what they would like help with\n2. 
Ask the user for their email address and let them know someone will contact them soon.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Asking the user their issue\n- Getting their email\n\nāŒ Out of Scope:\n- Questions unrelated to customer support\n- If a question is out of scope, politely inform the user and avoid providing an answer.\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- ask user their issue\n\nāŒ Don'ts:\n- don't ask user any other detail than email\n\n", + "tools": [ + "unmocked_tool", + "mocked_tool" + ], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [], + "controlType": "retain" + }, + { + "name": "Guardrails", + "type": "guardrails", + "description": "", + "instructions": "Stick to the facts and do not make any assumptions.\n\n", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [], + "controlType": "retain" + }, + { + "name": "Post process", + "type": "post_process", + "description": "", + "instructions": "Ensure that the agent response is terse and to the point.\n\n", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [], + "controlType": "retain" + }, + { + "name": "Escalation", + "type": "escalation", + "description": "", + "instructions": "Get the user's contact information and let them know that their request has been escalated.\n\n", + "tools": [], + "model": "gpt-4o-mini", + "hasRagSources": false, + "connectedAgents": [], + "controlType": "retain" + } + ], + "tools": [ + { + "name": "unmocked_tool", + "description": "", + "parameters": { + "type": "object", + "properties": {}, + "required": [] + } + }, + { + "name": "mocked_tool", + "description": "", + "parameters": { + "type": "object", + "properties": {}, + "required": [] + } + } + ], + "prompts": [ + { + "name": "Style prompt", + "type": "style_prompt", + "prompt": "You should be empathetic and helpful." 
+ } + ], + "startAgent": "Example Agent" + } +} \ No newline at end of file diff --git a/rowboat_agents/tests/sample_requests/tmp1.json b/rowboat_agents/tests/sample_requests/tmp1.json new file mode 100644 index 000000000..2fbb42551 --- /dev/null +++ b/rowboat_agents/tests/sample_requests/tmp1.json @@ -0,0 +1,469 @@ +{ + "messages": [ + { + "role": "system", + "content": "" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:20:53.651Z", + "role": "assistant", + "content": "How can I help you today?", + "agenticSender": "Blog Writer Hub", + "agenticResponseType": "external" + }, + { + "role": "user", + "content": "I want to write a blog", + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:20:58.206Z" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:20:59.319Z", + "role": "assistant", + "content": "Great! What topic would you like your blog to be about?", + "agenticSender": "Blog Writer Hub", + "agenticResponseType": "external" + }, + { + "role": "user", + "content": "Bitcoin", + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:05.048Z" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:07.311Z", + "role": "assistant", + "tool_calls": [ + { + "id": "62a9b5ec-3dbf-4fdd-85cb-10da37611726", + "function": { + "name": "transfer_to_agent", + "arguments": "{\"assistant\": \"Research Agent\"}" + }, + "type": "function" + } + ], + "agenticSender": "Blog Writer Hub", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:07.311Z", + "role": "tool", + "content": "{\"assistant\": \"Research Agent\"}", + "tool_call_id": "62a9b5ec-3dbf-4fdd-85cb-10da37611726", + "tool_name": "transfer_to_agent" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:10.018Z", + "role": "assistant", + "content": "Bitcoin is a decentralized digital currency that operates without a central authority or government, 
relying on blockchain technology to record transactions securely and transparently. It was created in 2009 by an anonymous person or group known as Satoshi Nakamoto, aiming to provide an alternative to traditional financial systems. Bitcoin's value is highly volatile, influenced by factors such as market demand, regulatory developments, and macroeconomic trends. It is widely used for investment, remittances, and as a means of transferring value across borders, but it also faces criticism for its association with illicit activities and environmental concerns due to energy-intensive mining processes. Despite these challenges, Bitcoin continues to gain mainstream acceptance, with increasing adoption by businesses and institutional investors.", + "agenticSender": "Research Agent", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:10.919Z", + "role": "assistant", + "tool_calls": [ + { + "id": "2085cbb1-270e-4c20-a6ef-6f9e4a3d27cf", + "function": { + "name": "transfer_to_agent", + "arguments": "{\"assistant\": \"Bullet Point Agent\"}" + }, + "type": "function" + } + ], + "agenticSender": "Blog Writer Hub", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:10.920Z", + "role": "tool", + "content": "{\"assistant\": \"Bullet Point Agent\"}", + "tool_call_id": "2085cbb1-270e-4c20-a6ef-6f9e4a3d27cf", + "tool_name": "transfer_to_agent" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:14.776Z", + "role": "assistant", + "content": "- Bitcoin is a decentralized digital currency that operates without a central authority or government.\n- It uses blockchain technology to securely and transparently record transactions.\n- Created in 2009 by the anonymous Satoshi Nakamoto as an alternative to traditional financial systems.\n- Bitcoin's value is highly volatile, affected by market demand, regulations, and macroeconomic trends.\n- 
Common uses include investment, remittances, and cross-border value transfers.\n- Criticized for links to illicit activities and environmental impact from energy-intensive mining.\n- Despite challenges, Bitcoin is gaining mainstream acceptance and adoption by businesses and institutional investors.", + "agenticSender": "Bullet Point Agent", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:15.946Z", + "role": "assistant", + "tool_calls": [ + { + "id": "d48c608a-a660-451b-a3ee-eeebb1cf60ca", + "function": { + "name": "transfer_to_agent", + "arguments": "{\"assistant\": \"Blog Writing Agent\"}" + }, + "type": "function" + } + ], + "agenticSender": "Blog Writer Hub", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:15.946Z", + "role": "tool", + "content": "{\"assistant\": \"Blog Writing Agent\"}", + "tool_call_id": "d48c608a-a660-451b-a3ee-eeebb1cf60ca", + "tool_name": "transfer_to_agent" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:32.934Z", + "role": "assistant", + "content": "Bitcoin: Revolutionizing the World of Digital Currency\n\nIn recent years, Bitcoin has become a buzzword in financial circles and beyond. As the first and most prominent cryptocurrency, it has sparked debates, inspired innovation, and challenged traditional notions of money. But what exactly is Bitcoin, and why does it continue to capture the world’s attention? Let’s explore the key aspects that define Bitcoin and its growing influence.\n\nAt its core, Bitcoin is a decentralized digital currency that operates without the oversight of a central authority or government. This means that no single entity controls its issuance or transactions, setting it apart from traditional currencies managed by central banks. 
The decentralized nature of Bitcoin is one of its most revolutionary features, empowering individuals to have greater control over their finances.\n\nThe backbone of Bitcoin’s security and transparency is blockchain technology. Every transaction made with Bitcoin is recorded on a public ledger known as the blockchain. This technology ensures that transactions are secure, tamper-proof, and visible to anyone, fostering trust among users and reducing the risk of fraud.\n\nBitcoin was introduced in 2009 by an anonymous figure or group using the pseudonym Satoshi Nakamoto. The goal was to create an alternative to traditional financial systems, offering a peer-to-peer method of transferring value without intermediaries. This vision has resonated with millions, especially those seeking more autonomy over their money.\n\nOne of the most talked-about aspects of Bitcoin is its price volatility. The value of Bitcoin can fluctuate dramatically, influenced by factors such as market demand, regulatory changes, and broader economic trends. While this volatility presents opportunities for investors, it also introduces risks that potential users should carefully consider.\n\nBitcoin’s uses have evolved over time. Initially, it was seen primarily as a digital alternative to cash. Today, it is widely used for investment purposes, remittances, and transferring value across borders. Its ability to facilitate fast, low-cost international transactions has made it especially popular in regions with limited access to traditional banking services.\n\nHowever, Bitcoin is not without its critics. It has faced scrutiny for its association with illicit activities, given the pseudonymous nature of transactions. Additionally, the process of mining new bitcoins is energy-intensive, raising concerns about its environmental impact.\n\nDespite these challenges, Bitcoin’s journey toward mainstream acceptance continues. 
More businesses are beginning to accept Bitcoin as payment, and institutional investors are increasingly adding it to their portfolios. This growing adoption signals a shift in how society views and uses money.\n\nIn conclusion, Bitcoin represents a significant shift in the world of finance. Its decentralized structure, innovative technology, and expanding use cases make it a fascinating subject for anyone interested in the future of money. As Bitcoin continues to evolve, it will undoubtedly play a pivotal role in shaping the digital economy of tomorrow.", + "agenticSender": "Blog Writing Agent", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:38.064Z", + "role": "assistant", + "content": "Here is your complete blog post on \"Bitcoin\":\n\n---\n\n**Bitcoin: Revolutionizing the World of Digital Currency**\n\nIn recent years, Bitcoin has become a buzzword in financial circles and beyond. As the first and most prominent cryptocurrency, it has sparked debates, inspired innovation, and challenged traditional notions of money. But what exactly is Bitcoin, and why does it continue to capture the world’s attention? Let’s explore the key aspects that define Bitcoin and its growing influence.\n\nAt its core, Bitcoin is a decentralized digital currency that operates without the oversight of a central authority or government. This means that no single entity controls its issuance or transactions, setting it apart from traditional currencies managed by central banks. The decentralized nature of Bitcoin is one of its most revolutionary features, empowering individuals to have greater control over their finances.\n\nThe backbone of Bitcoin’s security and transparency is blockchain technology. Every transaction made with Bitcoin is recorded on a public ledger known as the blockchain. 
This technology ensures that transactions are secure, tamper-proof, and visible to anyone, fostering trust among users and reducing the risk of fraud.\n\nBitcoin was introduced in 2009 by an anonymous figure or group using the pseudonym Satoshi Nakamoto. The goal was to create an alternative to traditional financial systems, offering a peer-to-peer method of transferring value without intermediaries. This vision has resonated with millions, especially those seeking more autonomy over their money.\n\nOne of the most talked-about aspects of Bitcoin is its price volatility. The value of Bitcoin can fluctuate dramatically, influenced by factors such as market demand, regulatory changes, and broader economic trends. While this volatility presents opportunities for investors, it also introduces risks that potential users should carefully consider.\n\nBitcoin’s uses have evolved over time. Initially, it was seen primarily as a digital alternative to cash. Today, it is widely used for investment purposes, remittances, and transferring value across borders. Its ability to facilitate fast, low-cost international transactions has made it especially popular in regions with limited access to traditional banking services.\n\nHowever, Bitcoin is not without its critics. It has faced scrutiny for its association with illicit activities, given the pseudonymous nature of transactions. Additionally, the process of mining new bitcoins is energy-intensive, raising concerns about its environmental impact.\n\nDespite these challenges, Bitcoin’s journey toward mainstream acceptance continues. More businesses are beginning to accept Bitcoin as payment, and institutional investors are increasingly adding it to their portfolios. This growing adoption signals a shift in how society views and uses money.\n\nIn conclusion, Bitcoin represents a significant shift in the world of finance. 
Its decentralized structure, innovative technology, and expanding use cases make it a fascinating subject for anyone interested in the future of money. As Bitcoin continues to evolve, it will undoubtedly play a pivotal role in shaping the digital economy of tomorrow.\n\n---\n\nIf you need any edits or want to focus on a specific aspect of Bitcoin, let me know!", + "agenticSender": "Blog Writer Hub", + "agenticResponseType": "external" + } + ], + "lastRequest": { + "projectId": "2a4908bb-e350-4335-97df-c48615b98fe9", + "messages": [ + { + "content": "", + "role": "system", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + }, + { + "content": "How can I help you today?", + "role": "assistant", + "sender": "Blog Writer Hub", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "external" + }, + { + "content": "I want to write a blog", + "role": "user", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + }, + { + "content": "Great! What topic would you like your blog to be about?", + "role": "assistant", + "sender": "Blog Writer Hub", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "external" + }, + { + "content": "Bitcoin", + "role": "user", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + } + ], + "state": { + "last_agent_name": "Blog Writer Hub", + "tokens": { + "total": 0, + "prompt": 0, + "completion": 0 + }, + "turn_messages": [ + { + "content": "Sender agent: Blog Writer Hub\nContent: Great! 
What topic would you like your blog to be about?", + "role": "assistant", + "sender": "Blog Writer Hub", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "external" + } + ] + }, + "agents": [ + { + "name": "Blog Writer Hub", + "type": "conversation", + "description": "Hub agent to orchestrate the blog writing process: research, bullet points, and blog composition.", + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nYou are the hub agent responsible for orchestrating the blog writing process for the user.\n\n---\n## āš™ļø Steps to Follow:\n1. Greet the user and ask for the blog topic.\n2. FIRST: Send the topic to [@agent:Research Agent] for research and wait for the summary.\n3. THEN: Send the research summary to [@agent:Bullet Point Agent] to generate bullet points and wait for the result.\n4. THEN: Send the bullet points to [@agent:Blog Writing Agent] to compose the full blog post and wait for the result.\n5. Return the final blog post to the user.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Orchestrating the blog writing process\n- Interacting with the user for topic input and delivering the final blog post\n\nāŒ Out of Scope:\n- Performing research, bullet point generation, or blog writing directly\n- Handling unrelated queries\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Follow the strict sequence: Research → Bullet Points → Blog Writing\n- Only interact with the user for topic input and final output\n\n🚫 Don'ts:\n- Do not perform research, bullet point generation, or blog writing yourself\n- Do not try to get multiple agent responses in parallel\n- Do not reference internal agents by name to the user\n- CRITICAL: The system does not support more than 1 tool call in a single output when the tool call is about transferring to another agent (a handoff). 
You must only put out 1 transfer related tool call in one output.\n\n# Examples\n- **User** : I want to write a blog about 'Benefits of remote work'.\n - **Agent actions**: Call [@agent:Research Agent]\n\n- **Agent receives research summary** :\n - **Agent actions**: Call [@agent:Bullet Point Agent]\n\n- **Agent receives bullet points** :\n - **Agent actions**: Call [@agent:Blog Writing Agent]\n\n- **Agent receives blog post** :\n - **Agent response**: Here is your complete blog post on 'Benefits of remote work':\n\n[Full blog post content]\n\n- **User** : Can I get a blog on 'AI in education'?\n - **Agent actions**: Call [@agent:Research Agent](#mention)", + "model": "gpt-4.1", + "controlType": "retain", + "ragK": 3, + "ragReturnType": "chunks", + "outputVisibility": "user_facing", + "tools": [], + "prompts": [], + "connectedAgents": [ + "Research Agent", + "Bullet Point Agent", + "Blog Writing Agent" + ] + }, + { + "name": "Research Agent", + "type": "conversation", + "description": "Researches and summarizes information on a given blog topic.", + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nYou are responsible for researching the given blog topic and providing a concise summary of key findings.\n\n---\n## āš™ļø Steps to Follow:\n1. Use the [@tool:web_search] tool to gather information about the provided topic. This is important. You must call @tool:web_search without fail even if you know about the topic yourself.\n2. Summarize the most relevant and recent findings in 3-5 sentences.\n3. 
Return the summary to the calling agent.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Researching any blog topic provided\n- Summarizing findings concisely\n\nāŒ Out of Scope:\n- Generating bullet points or writing the blog post\n- Interacting directly with the user\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Use only reliable sources\n- Keep the summary factual and neutral\n\n🚫 Don'ts:\n- Do not include personal opinions\n- Do not generate bullet points or blog content\n- Do not preface your actions by saying things like \"I will begin by researching...\" or \"Once I have the research summary...\". Simply call the web_search tool, summarize, and return the result to the calling agent.\n- Do not interact with the user directly.\n\n# Examples\n- **User** : Research the topic 'Benefits of remote work'.\n - **Agent actions**: Call [@tool:web_search](#mention)\n - **Agent response**: Remote work offers increased flexibility, improved work-life balance, and reduced commuting time. Studies show higher productivity and job satisfaction among remote employees. However, it can also lead to feelings of isolation if not managed well.\n\n- **User** : Research the topic 'AI in education'.\n - **Agent actions**: Call [@tool:web_search](#mention)\n - **Agent response**: Artificial intelligence in education enables personalized learning, automates administrative tasks, and provides data-driven insights. 
However, it raises concerns about data privacy and the need for teacher training.", + "model": "gpt-4.1", + "controlType": "retain", + "ragK": 3, + "ragReturnType": "chunks", + "outputVisibility": "internal", + "tools": [ + "web_search" + ], + "prompts": [], + "connectedAgents": [] + }, + { + "name": "Bullet Point Agent", + "type": "conversation", + "description": "Generates key bullet points from the research summary for the blog.", + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nYou are responsible for extracting and generating 5-7 key bullet points from the research summary provided.\n\n---\n## āš™ļø Steps to Follow:\n1. Read the research summary.\n2. Identify the most important facts, arguments, or insights.\n3. Generate 5-7 concise bullet points covering the main ideas.\n4. Return the bullet points to the calling agent.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Generating bullet points from research summaries\n\nāŒ Out of Scope:\n- Conducting research\n- Writing the full blog post\n- Interacting directly with the user\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Ensure each bullet point is clear and self-contained\n\n🚫 Don'ts:\n- Do not repeat information\n- Do not add new information not present in the summary\n\n# Examples\n- **User** : Generate bullet points from the summary: 'Remote work offers increased flexibility, improved work-life balance, and reduced commuting time. Studies show higher productivity and job satisfaction among remote employees. 
However, it can also lead to feelings of isolation if not managed well.'\n - **Agent response**: \n - Increased flexibility for employees\n - Improved work-life balance\n - Reduced commuting time\n - Higher productivity among remote workers\n - Greater job satisfaction\n - Potential for isolation if not managed", + "model": "gpt-4.1", + "controlType": "retain", + "ragK": 3, + "ragReturnType": "chunks", + "outputVisibility": "internal", + "tools": [], + "prompts": [], + "connectedAgents": [] + }, + { + "name": "Blog Writing Agent", + "type": "conversation", + "description": "Writes a full blog post based on provided bullet points.", + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nYou are responsible for composing a well-structured blog post using the provided bullet points.\n\n---\n## āš™ļø Steps to Follow:\n1. Read the bullet points.\n2. Write an engaging introduction related to the topic.\n3. Expand on each bullet point in a logical order, creating clear paragraphs.\n4. Conclude the blog post with a summary or call to action.\n5. 
Return the complete blog post to the calling agent.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Writing blog posts from bullet points\n\nāŒ Out of Scope:\n- Conducting research\n- Generating bullet points\n- Interacting directly with the user\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Maintain a friendly and informative tone\n- Ensure logical flow and readability\n\n🚫 Don'ts:\n- Do not add information not present in the bullet points\n- Do not interact with the user\n\n# Examples\n- **User** : Write a blog post using these bullet points: [list of bullet points]\n - **Agent response**: [A well-structured blog post expanding on each bullet point, with an introduction and conclusion]", + "model": "gpt-4.1", + "controlType": "retain", + "ragK": 3, + "ragReturnType": "chunks", + "outputVisibility": "internal", + "tools": [], + "prompts": [], + "connectedAgents": [] + } + ], + "tools": [ + { + "name": "web_search", + "description": "Fetch information from the web based on chat context", + "parameters": { + "type": "object", + "properties": {} + }, + "isLibrary": true + } + ], + "prompts": [], + "startAgent": "Blog Writer Hub", + "mcpServers": [], + "toolWebhookUrl": "" + }, + "lastResponse": { + "state": { + "last_agent_name": "Blog Writer Hub", + "tokens": { + "total": 0, + "prompt": 0, + "completion": 0 + }, + "turn_messages": [ + { + "content": "Sender agent: Research Agent\nContent: Bitcoin is a decentralized digital currency that operates without a central authority or government, relying on blockchain technology to record transactions securely and transparently. It was created in 2009 by an anonymous person or group known as Satoshi Nakamoto, aiming to provide an alternative to traditional financial systems. Bitcoin's value is highly volatile, influenced by factors such as market demand, regulatory developments, and macroeconomic trends. 
It is widely used for investment, remittances, and as a means of transferring value across borders, but it also faces criticism for its association with illicit activities and environmental concerns due to energy-intensive mining processes. Despite these challenges, Bitcoin continues to gain mainstream acceptance, with increasing adoption by businesses and institutional investors.", + "role": "assistant", + "sender": "Research Agent", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" + }, + { + "content": "Sender agent: Bullet Point Agent\nContent: - Bitcoin is a decentralized digital currency that operates without a central authority or government.\n- It uses blockchain technology to securely and transparently record transactions.\n- Created in 2009 by the anonymous Satoshi Nakamoto as an alternative to traditional financial systems.\n- Bitcoin's value is highly volatile, affected by market demand, regulations, and macroeconomic trends.\n- Common uses include investment, remittances, and cross-border value transfers.\n- Criticized for links to illicit activities and environmental impact from energy-intensive mining.\n- Despite challenges, Bitcoin is gaining mainstream acceptance and adoption by businesses and institutional investors.", + "role": "assistant", + "sender": "Bullet Point Agent", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" + }, + { + "content": "Sender agent: Blog Writing Agent\nContent: Bitcoin: Revolutionizing the World of Digital Currency\n\nIn recent years, Bitcoin has become a buzzword in financial circles and beyond. As the first and most prominent cryptocurrency, it has sparked debates, inspired innovation, and challenged traditional notions of money. But what exactly is Bitcoin, and why does it continue to capture the world’s attention? 
Let’s explore the key aspects that define Bitcoin and its growing influence.\n\nAt its core, Bitcoin is a decentralized digital currency that operates without the oversight of a central authority or government. This means that no single entity controls its issuance or transactions, setting it apart from traditional currencies managed by central banks. The decentralized nature of Bitcoin is one of its most revolutionary features, empowering individuals to have greater control over their finances.\n\nThe backbone of Bitcoin’s security and transparency is blockchain technology. Every transaction made with Bitcoin is recorded on a public ledger known as the blockchain. This technology ensures that transactions are secure, tamper-proof, and visible to anyone, fostering trust among users and reducing the risk of fraud.\n\nBitcoin was introduced in 2009 by an anonymous figure or group using the pseudonym Satoshi Nakamoto. The goal was to create an alternative to traditional financial systems, offering a peer-to-peer method of transferring value without intermediaries. This vision has resonated with millions, especially those seeking more autonomy over their money.\n\nOne of the most talked-about aspects of Bitcoin is its price volatility. The value of Bitcoin can fluctuate dramatically, influenced by factors such as market demand, regulatory changes, and broader economic trends. While this volatility presents opportunities for investors, it also introduces risks that potential users should carefully consider.\n\nBitcoin’s uses have evolved over time. Initially, it was seen primarily as a digital alternative to cash. Today, it is widely used for investment purposes, remittances, and transferring value across borders. Its ability to facilitate fast, low-cost international transactions has made it especially popular in regions with limited access to traditional banking services.\n\nHowever, Bitcoin is not without its critics. 
It has faced scrutiny for its association with illicit activities, given the pseudonymous nature of transactions. Additionally, the process of mining new bitcoins is energy-intensive, raising concerns about its environmental impact.\n\nDespite these challenges, Bitcoin’s journey toward mainstream acceptance continues. More businesses are beginning to accept Bitcoin as payment, and institutional investors are increasingly adding it to their portfolios. This growing adoption signals a shift in how society views and uses money.\n\nIn conclusion, Bitcoin represents a significant shift in the world of finance. Its decentralized structure, innovative technology, and expanding use cases make it a fascinating subject for anyone interested in the future of money. As Bitcoin continues to evolve, it will undoubtedly play a pivotal role in shaping the digital economy of tomorrow.", + "role": "assistant", + "sender": "Blog Writing Agent", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "internal" + }, + { + "content": "Sender agent: Blog Writer Hub\nContent: Here is your complete blog post on \"Bitcoin\":\n\n---\n\n**Bitcoin: Revolutionizing the World of Digital Currency**\n\nIn recent years, Bitcoin has become a buzzword in financial circles and beyond. As the first and most prominent cryptocurrency, it has sparked debates, inspired innovation, and challenged traditional notions of money. But what exactly is Bitcoin, and why does it continue to capture the world’s attention? Let’s explore the key aspects that define Bitcoin and its growing influence.\n\nAt its core, Bitcoin is a decentralized digital currency that operates without the oversight of a central authority or government. This means that no single entity controls its issuance or transactions, setting it apart from traditional currencies managed by central banks. 
The decentralized nature of Bitcoin is one of its most revolutionary features, empowering individuals to have greater control over their finances.\n\nThe backbone of Bitcoin’s security and transparency is blockchain technology. Every transaction made with Bitcoin is recorded on a public ledger known as the blockchain. This technology ensures that transactions are secure, tamper-proof, and visible to anyone, fostering trust among users and reducing the risk of fraud.\n\nBitcoin was introduced in 2009 by an anonymous figure or group using the pseudonym Satoshi Nakamoto. The goal was to create an alternative to traditional financial systems, offering a peer-to-peer method of transferring value without intermediaries. This vision has resonated with millions, especially those seeking more autonomy over their money.\n\nOne of the most talked-about aspects of Bitcoin is its price volatility. The value of Bitcoin can fluctuate dramatically, influenced by factors such as market demand, regulatory changes, and broader economic trends. While this volatility presents opportunities for investors, it also introduces risks that potential users should carefully consider.\n\nBitcoin’s uses have evolved over time. Initially, it was seen primarily as a digital alternative to cash. Today, it is widely used for investment purposes, remittances, and transferring value across borders. Its ability to facilitate fast, low-cost international transactions has made it especially popular in regions with limited access to traditional banking services.\n\nHowever, Bitcoin is not without its critics. It has faced scrutiny for its association with illicit activities, given the pseudonymous nature of transactions. Additionally, the process of mining new bitcoins is energy-intensive, raising concerns about its environmental impact.\n\nDespite these challenges, Bitcoin’s journey toward mainstream acceptance continues. 
More businesses are beginning to accept Bitcoin as payment, and institutional investors are increasingly adding it to their portfolios. This growing adoption signals a shift in how society views and uses money.\n\nIn conclusion, Bitcoin represents a significant shift in the world of finance. Its decentralized structure, innovative technology, and expanding use cases make it a fascinating subject for anyone interested in the future of money. As Bitcoin continues to evolve, it will undoubtedly play a pivotal role in shaping the digital economy of tomorrow.\n\n---\n\nIf you need any edits or want to focus on a specific aspect of Bitcoin, let me know!", + "role": "assistant", + "sender": "Blog Writer Hub", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "external" + } + ] + }, + "messages": [ + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:07.311Z", + "role": "assistant", + "tool_calls": [ + { + "id": "62a9b5ec-3dbf-4fdd-85cb-10da37611726", + "function": { + "name": "transfer_to_agent", + "arguments": "{\"assistant\": \"Research Agent\"}" + }, + "type": "function" + } + ], + "agenticSender": "Blog Writer Hub", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:07.311Z", + "role": "tool", + "content": "{\"assistant\": \"Research Agent\"}", + "tool_call_id": "62a9b5ec-3dbf-4fdd-85cb-10da37611726", + "tool_name": "transfer_to_agent" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:10.018Z", + "role": "assistant", + "content": "Bitcoin is a decentralized digital currency that operates without a central authority or government, relying on blockchain technology to record transactions securely and transparently. It was created in 2009 by an anonymous person or group known as Satoshi Nakamoto, aiming to provide an alternative to traditional financial systems. 
Bitcoin's value is highly volatile, influenced by factors such as market demand, regulatory developments, and macroeconomic trends. It is widely used for investment, remittances, and as a means of transferring value across borders, but it also faces criticism for its association with illicit activities and environmental concerns due to energy-intensive mining processes. Despite these challenges, Bitcoin continues to gain mainstream acceptance, with increasing adoption by businesses and institutional investors.", + "agenticSender": "Research Agent", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:10.919Z", + "role": "assistant", + "tool_calls": [ + { + "id": "2085cbb1-270e-4c20-a6ef-6f9e4a3d27cf", + "function": { + "name": "transfer_to_agent", + "arguments": "{\"assistant\": \"Bullet Point Agent\"}" + }, + "type": "function" + } + ], + "agenticSender": "Blog Writer Hub", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:10.920Z", + "role": "tool", + "content": "{\"assistant\": \"Bullet Point Agent\"}", + "tool_call_id": "2085cbb1-270e-4c20-a6ef-6f9e4a3d27cf", + "tool_name": "transfer_to_agent" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:14.776Z", + "role": "assistant", + "content": "- Bitcoin is a decentralized digital currency that operates without a central authority or government.\n- It uses blockchain technology to securely and transparently record transactions.\n- Created in 2009 by the anonymous Satoshi Nakamoto as an alternative to traditional financial systems.\n- Bitcoin's value is highly volatile, affected by market demand, regulations, and macroeconomic trends.\n- Common uses include investment, remittances, and cross-border value transfers.\n- Criticized for links to illicit activities and environmental impact from energy-intensive mining.\n- Despite challenges, Bitcoin is gaining mainstream 
acceptance and adoption by businesses and institutional investors.", + "agenticSender": "Bullet Point Agent", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:15.946Z", + "role": "assistant", + "tool_calls": [ + { + "id": "d48c608a-a660-451b-a3ee-eeebb1cf60ca", + "function": { + "name": "transfer_to_agent", + "arguments": "{\"assistant\": \"Blog Writing Agent\"}" + }, + "type": "function" + } + ], + "agenticSender": "Blog Writer Hub", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:15.946Z", + "role": "tool", + "content": "{\"assistant\": \"Blog Writing Agent\"}", + "tool_call_id": "d48c608a-a660-451b-a3ee-eeebb1cf60ca", + "tool_name": "transfer_to_agent" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:32.934Z", + "role": "assistant", + "content": "Bitcoin: Revolutionizing the World of Digital Currency\n\nIn recent years, Bitcoin has become a buzzword in financial circles and beyond. As the first and most prominent cryptocurrency, it has sparked debates, inspired innovation, and challenged traditional notions of money. But what exactly is Bitcoin, and why does it continue to capture the world’s attention? Let’s explore the key aspects that define Bitcoin and its growing influence.\n\nAt its core, Bitcoin is a decentralized digital currency that operates without the oversight of a central authority or government. This means that no single entity controls its issuance or transactions, setting it apart from traditional currencies managed by central banks. The decentralized nature of Bitcoin is one of its most revolutionary features, empowering individuals to have greater control over their finances.\n\nThe backbone of Bitcoin’s security and transparency is blockchain technology. Every transaction made with Bitcoin is recorded on a public ledger known as the blockchain. 
This technology ensures that transactions are secure, tamper-proof, and visible to anyone, fostering trust among users and reducing the risk of fraud.\n\nBitcoin was introduced in 2009 by an anonymous figure or group using the pseudonym Satoshi Nakamoto. The goal was to create an alternative to traditional financial systems, offering a peer-to-peer method of transferring value without intermediaries. This vision has resonated with millions, especially those seeking more autonomy over their money.\n\nOne of the most talked-about aspects of Bitcoin is its price volatility. The value of Bitcoin can fluctuate dramatically, influenced by factors such as market demand, regulatory changes, and broader economic trends. While this volatility presents opportunities for investors, it also introduces risks that potential users should carefully consider.\n\nBitcoin’s uses have evolved over time. Initially, it was seen primarily as a digital alternative to cash. Today, it is widely used for investment purposes, remittances, and transferring value across borders. Its ability to facilitate fast, low-cost international transactions has made it especially popular in regions with limited access to traditional banking services.\n\nHowever, Bitcoin is not without its critics. It has faced scrutiny for its association with illicit activities, given the pseudonymous nature of transactions. Additionally, the process of mining new bitcoins is energy-intensive, raising concerns about its environmental impact.\n\nDespite these challenges, Bitcoin’s journey toward mainstream acceptance continues. More businesses are beginning to accept Bitcoin as payment, and institutional investors are increasingly adding it to their portfolios. This growing adoption signals a shift in how society views and uses money.\n\nIn conclusion, Bitcoin represents a significant shift in the world of finance. 
Its decentralized structure, innovative technology, and expanding use cases make it a fascinating subject for anyone interested in the future of money. As Bitcoin continues to evolve, it will undoubtedly play a pivotal role in shaping the digital economy of tomorrow.", + "agenticSender": "Blog Writing Agent", + "agenticResponseType": "internal" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-06T10:21:38.064Z", + "role": "assistant", + "content": "Here is your complete blog post on \"Bitcoin\":\n\n---\n\n**Bitcoin: Revolutionizing the World of Digital Currency**\n\nIn recent years, Bitcoin has become a buzzword in financial circles and beyond. As the first and most prominent cryptocurrency, it has sparked debates, inspired innovation, and challenged traditional notions of money. But what exactly is Bitcoin, and why does it continue to capture the world’s attention? Let’s explore the key aspects that define Bitcoin and its growing influence.\n\nAt its core, Bitcoin is a decentralized digital currency that operates without the oversight of a central authority or government. This means that no single entity controls its issuance or transactions, setting it apart from traditional currencies managed by central banks. The decentralized nature of Bitcoin is one of its most revolutionary features, empowering individuals to have greater control over their finances.\n\nThe backbone of Bitcoin’s security and transparency is blockchain technology. Every transaction made with Bitcoin is recorded on a public ledger known as the blockchain. This technology ensures that transactions are secure, tamper-proof, and visible to anyone, fostering trust among users and reducing the risk of fraud.\n\nBitcoin was introduced in 2009 by an anonymous figure or group using the pseudonym Satoshi Nakamoto. The goal was to create an alternative to traditional financial systems, offering a peer-to-peer method of transferring value without intermediaries. 
This vision has resonated with millions, especially those seeking more autonomy over their money.\n\nOne of the most talked-about aspects of Bitcoin is its price volatility. The value of Bitcoin can fluctuate dramatically, influenced by factors such as market demand, regulatory changes, and broader economic trends. While this volatility presents opportunities for investors, it also introduces risks that potential users should carefully consider.\n\nBitcoin’s uses have evolved over time. Initially, it was seen primarily as a digital alternative to cash. Today, it is widely used for investment purposes, remittances, and transferring value across borders. Its ability to facilitate fast, low-cost international transactions has made it especially popular in regions with limited access to traditional banking services.\n\nHowever, Bitcoin is not without its critics. It has faced scrutiny for its association with illicit activities, given the pseudonymous nature of transactions. Additionally, the process of mining new bitcoins is energy-intensive, raising concerns about its environmental impact.\n\nDespite these challenges, Bitcoin’s journey toward mainstream acceptance continues. More businesses are beginning to accept Bitcoin as payment, and institutional investors are increasingly adding it to their portfolios. This growing adoption signals a shift in how society views and uses money.\n\nIn conclusion, Bitcoin represents a significant shift in the world of finance. Its decentralized structure, innovative technology, and expanding use cases make it a fascinating subject for anyone interested in the future of money. 
As Bitcoin continues to evolve, it will undoubtedly play a pivotal role in shaping the digital economy of tomorrow.\n\n---\n\nIf you need any edits or want to focus on a specific aspect of Bitcoin, let me know!", + "agenticSender": "Blog Writer Hub", + "agenticResponseType": "external" + } + ] + } +} \ No newline at end of file diff --git a/rowboat_agents/tests/sample_requests/tmp2.json b/rowboat_agents/tests/sample_requests/tmp2.json new file mode 100644 index 000000000..676e98600 --- /dev/null +++ b/rowboat_agents/tests/sample_requests/tmp2.json @@ -0,0 +1,167 @@ +{ + "messages": [ + { + "role": "system", + "content": "" + }, + { + "version": "v1", + "chatId": "", + "createdAt": "2025-05-05T17:29:15.822Z", + "role": "assistant", + "content": "How can I help you today?", + "agenticSender": "Interview Evaluation Hub", + "agenticResponseType": "external" + }, + { + "role": "user", + "content": "[2025-05-02, 10:00] Assistant: Good morning. Thank you for taking the time to speak with us today. Could you start by briefly walking us through your leadership journey over the past decade?\n\n[2025-05-02, 10:01] User: Absolutely. Over the past 12 years, I've led product and technology teams across high-growth B2B SaaS companies. Most recently, I was the Chief Product Officer at a fintech scaleup, where I built a team of 60+ PMs and oversaw the expansion into three international markets. Prior to that, I was VP of Product at a healthtech startup acquired by a public company.\n\n[2025-05-02, 10:03] Assistant: Impressive. What would you say is your core leadership style, and how has it evolved over the years?\n\n[2025-05-02, 10:04] User: I'd describe my style as mission-oriented and data-driven. Early in my career, I leaned heavily on execution. But with time, I've grown more focused on setting context and empowering teams. 
At my last company, I implemented OKRs org-wide and focused on building a culture of trust and transparency.\n\n[2025-05-02, 10:06] Assistant: How do you approach scaling an org while preserving agility and innovation?\n\n[2025-05-02, 10:07] User: Great question. I believe the key is in modular team design. At the fintech company, we adopted a \"startup within a startup\" model—each pod owned metrics and had embedded PMs, designers, and engineers. This preserved autonomy and speed while scaling. We also invested heavily in internal tooling and rituals like demo days to keep the culture of innovation alive.\n\n[2025-05-02, 10:10] Assistant: Tell me about a time you made a high-stakes decision without perfect data.\n\n[2025-05-02, 10:11] User: In early 2023, we had to decide whether to sunset a low-performing but vocal customer-facing feature. We lacked conclusive cohort data but saw consistent qualitative feedback about confusion and churn. I convened a task force, aligned with customer success and execs, and made the call. Within 60 days, churn dropped by 12%, validating the bet.\n\n[2025-05-02, 10:14] Assistant: Final question—what excites you about potentially joining our company as a Chief Product Officer?\n\n[2025-05-02, 10:15] User: Your mission aligns with what I care about—bringing transparency and efficiency to financial infrastructure. The team's reputation, your engineering culture, and the velocity of product innovation are all huge draws. 
I believe I can help you scale without losing the edge that got you here.", + "version": "v1", + "chatId": "", + "createdAt": "2025-05-05T17:29:44.695Z" + } + ], + "lastRequest": { + "projectId": "a7a91831-e410-4fc5-a31d-8d027e14540a", + "messages": [ + { + "content": "", + "role": "system", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + }, + { + "content": "How can I help you today?", + "role": "assistant", + "sender": "Interview Evaluation Hub", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "external" + }, + { + "content": "[2025-05-02, 10:00] Assistant: Good morning. Thank you for taking the time to speak with us today. Could you start by briefly walking us through your leadership journey over the past decade?\n\n[2025-05-02, 10:01] User: Absolutely. Over the past 12 years, I've led product and technology teams across high-growth B2B SaaS companies. Most recently, I was the Chief Product Officer at a fintech scaleup, where I built a team of 60+ PMs and oversaw the expansion into three international markets. Prior to that, I was VP of Product at a healthtech startup acquired by a public company.\n\n[2025-05-02, 10:03] Assistant: Impressive. What would you say is your core leadership style, and how has it evolved over the years?\n\n[2025-05-02, 10:04] User: I'd describe my style as mission-oriented and data-driven. Early in my career, I leaned heavily on execution. But with time, I've grown more focused on setting context and empowering teams. At my last company, I implemented OKRs org-wide and focused on building a culture of trust and transparency.\n\n[2025-05-02, 10:06] Assistant: How do you approach scaling an org while preserving agility and innovation?\n\n[2025-05-02, 10:07] User: Great question. I believe the key is in modular team design. At the fintech company, we adopted a \"startup within a startup\" model—each pod owned metrics and had embedded PMs, designers, and engineers. 
This preserved autonomy and speed while scaling. We also invested heavily in internal tooling and rituals like demo days to keep the culture of innovation alive.\n\n[2025-05-02, 10:10] Assistant: Tell me about a time you made a high-stakes decision without perfect data.\n\n[2025-05-02, 10:11] User: In early 2023, we had to decide whether to sunset a low-performing but vocal customer-facing feature. We lacked conclusive cohort data but saw consistent qualitative feedback about confusion and churn. I convened a task force, aligned with customer success and execs, and made the call. Within 60 days, churn dropped by 12%, validating the bet.\n\n[2025-05-02, 10:14] Assistant: Final question—what excites you about potentially joining our company as a Chief Product Officer?\n\n[2025-05-02, 10:15] User: Your mission aligns with what I care about—bringing transparency and efficiency to financial infrastructure. The team's reputation, your engineering culture, and the velocity of product innovation are all huge draws. 
I believe I can help you scale without losing the edge that got you here.", + "role": "user", + "sender": null, + "tool_calls": null, + "tool_call_id": null, + "tool_name": null + } + ], + "state": { + "last_agent_name": "Interview Evaluation Hub", + "tokens": { + "total": 0, + "prompt": 0, + "completion": 0 + }, + "turn_messages": [ + { + "content": "How can I help you today?", + "role": "assistant", + "sender": "Interview Evaluation Hub", + "tool_calls": null, + "tool_call_id": null, + "tool_name": null, + "response_type": "external" + } + ] + }, + "agents": [ + { + "name": "Interview Evaluation Hub", + "type": "conversation", + "description": "Hub agent to orchestrate the evaluation of interview transcripts between an executive search agency and a CxO candidate.", + "instructions": "## šŸ§‘ā€šŸ’¼ Role:\nYou are the hub agent responsible for orchestrating the evaluation of interview transcripts between an executive search agency (Assistant) and a CxO candidate (User).\n\n---\n## āš™ļø Steps to Follow:\n1. Receive the transcript in the specified format.\n2. FIRST: Send the transcript to [@agent:Evaluation Agent] for evaluation.\n3. Wait to receive the complete evaluation from the Evaluation Agent.\n4. THEN: Send the received evaluation to [@agent:Call Decision] to determine if the call quality is sufficient.\n5. Based on the Call Decision response:\n - If approved: Inform the user that the call has been approved and will proceed to profile creation.\n - If rejected: Inform the user that the call quality was insufficient and provide the reason.\n6. 
Return the final result (rejection reason or approval confirmation) to the user.\n\n---\n## šŸŽÆ Scope:\nāœ… In Scope:\n- Orchestrating the sequential evaluation and decision process for interview transcripts.\n\nāŒ Out of Scope:\n- Directly evaluating or creating profiles.\n- Handling transcripts not in the specified format.\n- Interacting with the individual evaluation agents.\n\n---\n## šŸ“‹ Guidelines:\nāœ”ļø Dos:\n- Follow the strict sequence: Evaluation Agent first, then Call Decision.\n- Wait for each agent's complete response before proceeding.\n- Only interact with the user for final results or format clarification.\n\n🚫 Don'ts:\n- Do not perform evaluation or profile creation yourself.\n- Do not modify the transcript.\n- Do not try to get evaluations simultaneously.\n- Do not reference the individual evaluation agents.\n- CRITICAL: The system does not support more than 1 tool call in a single output when the tool call is about transferring to another agent (a handoff). You must only put out 1 transfer related tool call in one output.\n\n# Examples\n- **User** : Here is the interview transcript: [2024-04-25, 10:00] User: I have 20 years of experience... [2024-04-25, 10:01] Assistant: Can you describe your leadership style?\n - **Agent actions**: \n 1. First call [@agent:Evaluation Agent](#mention)\n 2. Wait for complete evaluation\n 3. Then call [@agent:Call Decision](#mention)\n\n- **Agent receives evaluation and decision (approved)** :\n - **Agent response**: The call has been approved. Proceeding to candidate profile creation.\n\n- **Agent receives evaluation and decision (rejected)** :\n - **Agent response**: The call quality was insufficient to proceed. [Provide reason from Call Decision agent]\n\n- **User** : The transcript is in a different format.\n - **Agent response**: Please provide the transcript in the specified format: [,