From c273bbaa92299f4fa34dd3ed0fa6abfe5ba25cd7 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 13 Jun 2025 09:35:13 -0400 Subject: [PATCH 1/7] Admin api and updated workflows --- .../cleanup-all/cleanup-test-projects.py | 407 +--------- .github/actions/project-create/script.py | 96 +-- .github/actions/project-delete/action.yml | 7 - .../actions/project-delete/delete-project.py | 82 +- .github/workflows/testing-integration.yaml | 22 + codegen/build-oas.sh | 2 +- docs/admin.rst | 23 + docs/conf.py | 7 +- docs/index.rst | 1 + pinecone/__init__.py | 2 + pinecone/admin/__init__.py | 3 + pinecone/admin/admin.py | 229 ++++++ .../admin/project_eraser.py | 102 +-- pinecone/admin/resources/__init__.py | 4 + pinecone/admin/resources/api_key.py | 210 ++++++ pinecone/admin/resources/project.py | 502 +++++++++++++ pinecone/config/openapi_configuration.py | 7 + pinecone/core/openapi/admin/__init__.py | 30 + pinecone/core/openapi/admin/api/__init__.py | 3 + .../core/openapi/admin/api/api_keys_api.py | 590 +++++++++++++++ .../core/openapi/admin/api/projects_api.py | 703 ++++++++++++++++++ pinecone/core/openapi/admin/apis/__init__.py | 17 + pinecone/core/openapi/admin/model/__init__.py | 5 + pinecone/core/openapi/admin/model/api_key.py | 303 ++++++++ .../admin/model/api_key_with_secret.py | 290 ++++++++ .../admin/model/create_api_key_request.py | 291 ++++++++ .../admin/model/create_project_request.py | 286 +++++++ .../openapi/admin/model/inline_response200.py | 278 +++++++ .../admin/model/inline_response2001.py | 278 +++++++ .../openapi/admin/model/inline_response401.py | 290 ++++++++ .../admin/model/inline_response401_error.py | 308 ++++++++ pinecone/core/openapi/admin/model/project.py | 317 ++++++++ .../admin/model/update_project_request.py | 280 +++++++ .../core/openapi/admin/models/__init__.py | 21 + .../db_data/api/namespace_operations_api.py | 12 +- .../db_data/api/vector_operations_api.py | 16 +- .../openapi/db_data/model/query_request.py | 4 +- 
.../openapi/db_data/model/upsert_request.py | 4 +- .../openapi/inference/api/inference_api.py | 24 +- pinecone/core/openapi/oauth/__init__.py | 30 + pinecone/core/openapi/oauth/api/__init__.py | 3 + pinecone/core/openapi/oauth/api/o_auth_api.py | 189 +++++ pinecone/core/openapi/oauth/apis/__init__.py | 16 + pinecone/core/openapi/oauth/model/__init__.py | 5 + .../openapi/oauth/model/inline_response400.py | 274 +++++++ .../core/openapi/oauth/model/token_request.py | 301 ++++++++ .../openapi/oauth/model/token_response.py | 290 ++++++++ .../core/openapi/oauth/models/__init__.py | 14 + poetry.lock | 500 ++++++++++++- pyproject.toml | 9 + tests/integration/admin/__init__.py | 0 tests/integration/admin/conftest.py | 0 tests/integration/admin/test_api_key.py | 127 ++++ tests/integration/admin/test_projects.py | 131 ++++ 54 files changed, 7300 insertions(+), 645 deletions(-) create mode 100644 docs/admin.rst create mode 100644 pinecone/admin/__init__.py create mode 100644 pinecone/admin/admin.py rename .github/actions/project-delete/delete-resources.py => pinecone/admin/project_eraser.py (78%) create mode 100644 pinecone/admin/resources/__init__.py create mode 100644 pinecone/admin/resources/api_key.py create mode 100644 pinecone/admin/resources/project.py create mode 100644 pinecone/core/openapi/admin/__init__.py create mode 100644 pinecone/core/openapi/admin/api/__init__.py create mode 100644 pinecone/core/openapi/admin/api/api_keys_api.py create mode 100644 pinecone/core/openapi/admin/api/projects_api.py create mode 100644 pinecone/core/openapi/admin/apis/__init__.py create mode 100644 pinecone/core/openapi/admin/model/__init__.py create mode 100644 pinecone/core/openapi/admin/model/api_key.py create mode 100644 pinecone/core/openapi/admin/model/api_key_with_secret.py create mode 100644 pinecone/core/openapi/admin/model/create_api_key_request.py create mode 100644 pinecone/core/openapi/admin/model/create_project_request.py create mode 100644 
pinecone/core/openapi/admin/model/inline_response200.py create mode 100644 pinecone/core/openapi/admin/model/inline_response2001.py create mode 100644 pinecone/core/openapi/admin/model/inline_response401.py create mode 100644 pinecone/core/openapi/admin/model/inline_response401_error.py create mode 100644 pinecone/core/openapi/admin/model/project.py create mode 100644 pinecone/core/openapi/admin/model/update_project_request.py create mode 100644 pinecone/core/openapi/admin/models/__init__.py create mode 100644 pinecone/core/openapi/oauth/__init__.py create mode 100644 pinecone/core/openapi/oauth/api/__init__.py create mode 100644 pinecone/core/openapi/oauth/api/o_auth_api.py create mode 100644 pinecone/core/openapi/oauth/apis/__init__.py create mode 100644 pinecone/core/openapi/oauth/model/__init__.py create mode 100644 pinecone/core/openapi/oauth/model/inline_response400.py create mode 100644 pinecone/core/openapi/oauth/model/token_request.py create mode 100644 pinecone/core/openapi/oauth/model/token_response.py create mode 100644 pinecone/core/openapi/oauth/models/__init__.py create mode 100644 tests/integration/admin/__init__.py create mode 100644 tests/integration/admin/conftest.py create mode 100644 tests/integration/admin/test_api_key.py create mode 100644 tests/integration/admin/test_projects.py diff --git a/.github/actions/cleanup-all/cleanup-test-projects.py b/.github/actions/cleanup-all/cleanup-test-projects.py index 876c7bf5..bbf926a6 100644 --- a/.github/actions/cleanup-all/cleanup-test-projects.py +++ b/.github/actions/cleanup-all/cleanup-test-projects.py @@ -1,384 +1,11 @@ import logging -from pinecone import Pinecone, NotFoundException +from pinecone import Admin +from pinecone.admin.project_eraser import _ProjectEraser import time -from collections import deque -import json -import urllib3 import os -import dotenv -dotenv.load_dotenv() - -logging.basicConfig( - level=logging.DEBUG, format="%(levelname)-8s | %(name)s:%(lineno)4d | %(message)s" -) 
logger = logging.getLogger(__name__) - -class AdminAPI: - def __init__(self, client_id, client_secret): - self.http = urllib3.PoolManager() - self.token = None - self.token = self._get_token(client_id, client_secret) - - def _request(self, method, url, headers={}, body_dict=None): - logger.info(f"Requesting {method} {url} with body {body_dict}") - api_version = os.environ.get("API_VERSION", "2025-04") - default_headers = { - "X-Pinecone-Api-Version": api_version, - "Content-Type": "application/json", - "Accept": "application/json", - } - if self.token is not None: - default_headers["Authorization"] = f"Bearer {self.token}" - headers = {**default_headers, **headers} - - args = {"method": method, "url": url, "headers": headers} - if body_dict is not None: - args["body"] = json.dumps(body_dict) - response = self.http.request(**args) - - logger.info(f"Response Status: {response.status}") - if response.status >= 400: - raise Exception( - f"Request failed with status {response.status}: {response.data.decode('utf-8')}" - ) - - if response is None or response.data is None or response.data == b"": - return None - return json.loads(response.data.decode("utf-8")) - - def _get_token(self, client_id, client_secret): - response = self._request( - "POST", - "https://login.pinecone.io/oauth/token", - body_dict={ - "grant_type": "client_credentials", - "client_id": client_id, - "client_secret": client_secret, - "audience": "https://api.pinecone.io/", - }, - ) - - token = response["access_token"] - return token - - def create_project(self, project_name, max_pods): - response = self._request( - "POST", - "https://api.pinecone.io/admin/projects", - body_dict={"name": project_name, "max_pods": max_pods}, - ) - return response - - def create_api_key(self, project_id): - response = self._request( - "POST", - f"https://api.pinecone.io/admin/projects/{project_id}/api-keys", - body_dict={"name": "ci-key"}, - ) - return response - - def list_projects(self): - response = self._request("GET", 
"https://api.pinecone.io/admin/projects") - return response - - def describe_project(self, project_id): - response = self._request("GET", f"https://api.pinecone.io/admin/projects/{project_id}") - return response - - def delete_project(self, project_id): - response = self._request("DELETE", f"https://api.pinecone.io/admin/projects/{project_id}") - return response - - def get_project_id(self, project_name): - project_list = self.list_projects()["data"] - for project in project_list: - if project["name"] == project_name: - return project["id"] - return None - - -class RetryCounter: - def __init__(self, max_retries): - self.max_retries = max_retries - self.counts = {} - - def increment(self, key): - if key not in self.counts: - self.counts[key] = 0 - self.counts[key] += 1 - - def get_count(self, key): - return self.counts.get(key, 0) - - def is_maxed_out(self, key): - return self.get_count(key) >= self.max_retries - - -class ProjectEraser: - def __init__(self, api_key): - self.pc = Pinecone(api_key=api_key) - - # In situations where there are a lot of resources, we want to - # slow down the rate of requests just to avoid any concerns about - # rate limits - self.sleep_interval = 5 - self.undeleteable_resources = [] - - def pluralize(self, resource_name): - if resource_name.lower() == "index": - return resource_name + "es" - else: - return resource_name + "s" - - def _delete_all_of_resource(self, resource_name, list_func, delete_func, get_state_func): - resources_to_delete = deque(list_func()) - if len(resources_to_delete) == 0: - logger.info(f"No {self.pluralize(resource_name)} to delete") - return - - state_check_retries = RetryCounter(3) - failed_delete_retries = RetryCounter(3) - is_deletable_retries = RetryCounter(3) - is_terminating_retries = RetryCounter(10) - - undeletable_resources = [] - - while len(resources_to_delete) > 0: - logger.info( - f"There are {len(resources_to_delete)} {self.pluralize(resource_name)} left to delete" - ) - 
time.sleep(self.sleep_interval) - - resource = resources_to_delete.popleft() - logger.info(f"Processing {resource_name} {resource.name}") - - # Get the latest description of the resource - try: - state_check_retries.increment(resource.name) - state = get_state_func(name=resource.name) - logger.info(f"{resource_name} {resource.name} has state {state}") - except NotFoundException: - logger.info(f"{resource_name} {resource.name} has already been deleted, continuing") - continue - except Exception as e: - if state_check_retries.is_maxed_out(resource.name): - logger.error(f"Error describing {resource_name} {resource.name}: {e}") - undeletable_resources.append( - { - "resource": resource, - "type": resource_name, - "reason": f"Error describing {resource_name} {resource.name}: {e}", - } - ) - continue - else: - logger.info( - f"{resource_name} {resource.name} has been returned to the back of the delete queue" - ) - resources_to_delete.append(resource) - continue - - if state == "Terminating" or state == "Terminated": - is_terminating_retries.increment(resource.name) - if is_terminating_retries.is_maxed_out(resource.name): - logger.error( - f"{resource_name} {resource.name} has been in the terminating state for too long, skipping" - ) - undeletable_resources.append( - { - "resource": resource, - "type": resource_name, - "reason": f"{resource_name} has been in the terminating state for too long", - } - ) - continue - else: - logger.info( - f"{resource_name} {resource.name} is in the process of being deleted, adding to the back of the delete queue to recheck later" - ) - resources_to_delete.append(resource) - continue - - # If the index is not in a deleteable state, add it to the back of the delete queue - deleteable_states = ["Ready", "InitializationFailed"] - if state not in deleteable_states: - is_deletable_retries.increment(resource.name) - if is_deletable_retries.is_maxed_out(resource.name): - attempts = is_deletable_retries.get_count(resource.name) - logger.error( - 
f"{resource_name} {resource.name} did not enter a deleteable state after {attempts} attempts, skipping" - ) - undeletable_resources.append( - { - "resource": resource, - "type": resource_name, - "reason": f"Not in a deleteable state after {attempts} attempts", - } - ) - continue - else: - logger.info( - f"{resource_name} {resource.name} state {state} is not deleteable, adding to the back of the delete queue" - ) - resources_to_delete.append(resource) - continue - - try: - logger.info(f"Attempting deleting of {resource_name} {resource.name}") - delete_func(name=resource.name) - logger.info(f"Successfully deleted {resource_name} {resource.name}") - except Exception as e: - logger.error(f"Error deleting {resource_name} {resource.name}: {e}") - failed_delete_retries.increment(resource.name) - if failed_delete_retries.is_maxed_out(resource.name): - attempts = failed_delete_retries.get_count(resource.name) - logger.error( - f"Failed to delete {resource_name} {resource.name} after {attempts} attempts, skipping" - ) - undeletable_resources.append( - { - "resource": resource, - "type": resource_name, - "reason": f"Failed to delete after {attempts} attempts", - } - ) - continue - else: - logger.info( - f"{resource_name} {resource.name} has been returned to the back of the delete queue" - ) - resources_to_delete.append(resource) - continue - - if len(undeletable_resources) > 0: - logger.error( - f"There were {len(undeletable_resources)} {self.pluralize(resource_name)} that were not deleted" - ) - for item in undeletable_resources: - logger.error( - f"{resource_name} {item['resource'].name} was not deleted because {item['reason']}" - ) - self.undeleteable_resources.append(item) - else: - logger.info(f"All {self.pluralize(resource_name)} were deleted successfully") - - def delete_all_indexes(self): - index_list = self.pc.db.index.list() - if len(index_list) == 0: - logger.info("No indexes to delete") - return - - index_with_deletion_protection = [ - index for index in 
index_list if index.deletion_protection == "enabled" - ] - for index in index_with_deletion_protection: - logger.info(f"Disabling deletion protection for Index {index.name}") - time.sleep(self.sleep_interval) - try: - self.pc.db.index.configure(name=index.name, deletion_protection="disabled") - except Exception as e: - logger.error(f"Error disabling deletion protection for Index {index.name}: {e}") - self.undeleteable_resources.append( - { - "resource": index, - "type": "index", - "reason": f"Failed to disable deletion protection: {e}", - } - ) - - def get_state_func(name): - desc = self.pc.db.index.describe(name=name) - return desc.status.state - - return self._delete_all_of_resource( - resource_name="index", - list_func=self.pc.db.index.list, - delete_func=self.pc.db.index.delete, - get_state_func=get_state_func, - ) - - def delete_all_collections(self): - def get_state_func(name): - desc = self.pc.db.collection.describe(name=name) - return desc["status"] - - return self._delete_all_of_resource( - resource_name="collection", - list_func=self.pc.db.collection.list, - delete_func=self.pc.db.collection.delete, - get_state_func=get_state_func, - ) - - def delete_all_backups(self): - def _get_backup_by_name(name): - for backup in self.pc.db.backup.list(): - if backup.name == name: - return backup - raise Exception(f"Backup {name} not found") - - def delete_func(name): - backup = _get_backup_by_name(name) - return self.pc.db.backup.delete(backup_id=backup.backup_id) - - def get_state_func(name): - backup = _get_backup_by_name(name) - return backup.status - - return self._delete_all_of_resource( - resource_name="backup", - list_func=self.pc.db.backup.list, - delete_func=delete_func, - get_state_func=get_state_func, - ) - - def _cleanup_all(self): - self.undeleteable_resources = [] - self.delete_all_backups() - self.delete_all_collections() - self.delete_all_indexes() - - def cleanup_all(self): - self._cleanup_all() - - if len(self.undeleteable_resources) > 0: - 
logger.info( - f"There were {len(self.undeleteable_resources)} undeleteable resources, retrying in 60 seconds" - ) - time.sleep(60) - self._cleanup_all() - - if len(self.undeleteable_resources) > 0: - logger.info( - f"There were {len(self.undeleteable_resources)} undeleteable resources, retrying in 120 seconds" - ) - time.sleep(120) - self._cleanup_all() - - if len(self.undeleteable_resources) > 0: - logger.info( - f"There were {len(self.undeleteable_resources)} undeleteable resources, retrying in 240 seconds" - ) - time.sleep(240) - self._cleanup_all() - - if len(self.undeleteable_resources) > 0: - logger.info( - f"There were {len(self.undeleteable_resources)} undeleteable resources, retrying in 240 seconds" - ) - time.sleep(240) - self._cleanup_all() - - if len(self.undeleteable_resources) > 0: - logger.error( - f"There were {len(self.undeleteable_resources)} undeleteable resources, giving up" - ) - raise Exception( - f"There were {len(self.undeleteable_resources)} undeleteable resources, giving up" - ) - - if __name__ == "__main__": from pinecone import __version__ @@ -388,25 +15,35 @@ def cleanup_all(self): os.environ.get("PINECONE_SERVICE_ACCOUNT_CLIENT_ID") is not None and os.environ.get("PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET") is not None ): - admin_api = AdminAPI( - os.environ.get("PINECONE_SERVICE_ACCOUNT_CLIENT_ID"), - os.environ.get("PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET"), + admin_api = Admin( + client_id=os.environ.get("PINECONE_SERVICE_ACCOUNT_CLIENT_ID"), + client_secret=os.environ.get("PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET"), ) else: raise Exception( "PINECONE_SERVICE_ACCOUNT_CLIENT_ID and PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET must be set" ) - projects = admin_api.list_projects() - for project in projects["data"]: - project_name = project["name"] - donotdelete = ["python-plugin-embeddings", "pinecone-python-client"] + donotdelete = ["python-plugin-embeddings", "pinecone-python-client"] + projects = admin_api.projects.list() + for project in 
projects.data: + project_name = project.name + if project_name.startswith("python") or project_name in donotdelete: logger.info(f"=== Cleaning up project {project_name} ===") - api_key = admin_api.create_api_key(project["id"])["value"] - ProjectEraser(api_key).cleanup_all() + api_key_response = admin_api.api_keys.create(project_id=project.id, name="ci-cleanup") + + # force_delete=True overrides deletion protection. This seems too + # risky to include in the Admin project delete function for + # end users, so we do this extra delete step separately. + eraser = _ProjectEraser(api_key=api_key_response.value) + eraser.delete_all_indexes(force_delete=True) + eraser.delete_all_collections() + eraser.delete_all_backups() + + admin_api.api_keys.delete(api_key_id=api_key_response.key.id) if project_name.startswith("python") and project_name not in donotdelete: logger.info(f"=== Deleting project {project_name} ===") - admin_api.delete_project(project["id"]) + admin_api.projects.delete(project_id=project.id) time.sleep(10) diff --git a/.github/actions/project-create/script.py b/.github/actions/project-create/script.py index f1f9e40e..0a30587b 100644 --- a/.github/actions/project-create/script.py +++ b/.github/actions/project-create/script.py @@ -1,8 +1,7 @@ -import json import os -import urllib3 import logging from datetime import datetime +from pinecone import Admin logging.basicConfig( level=logging.DEBUG, format="%(levelname)-8s | %(name)s:%(lineno)d | %(message)s" @@ -27,91 +26,6 @@ def mask(value): print(f"::add-mask::{value}") -class AdminAPI: - def __init__(self, client_id, client_secret): - self.http = urllib3.PoolManager() - self.token = None - self.token = self._get_token(client_id, client_secret) - - def _request(self, method, url, headers={}, body_dict=None): - logger.info(f"Requesting {method} {url} with body {body_dict}") - api_version = os.environ.get("API_VERSION", "2025-04") - default_headers = { - "X-Pinecone-Api-Version": api_version, - "Content-Type": 
"application/json", - "Accept": "application/json", - } - if self.token is not None: - default_headers["Authorization"] = f"Bearer {self.token}" - headers = {**default_headers, **headers} - - args = {"method": method, "url": url, "headers": headers} - if body_dict is not None: - args["body"] = json.dumps(body_dict) - response = self.http.request(**args) - - logger.info(f"Response Status: {response.status}") - if response.status >= 400: - raise Exception( - f"Request failed with status {response.status}: {response.data.decode('utf-8')}" - ) - - if response is None or response.data is None or response.data == b"": - return None - return json.loads(response.data.decode("utf-8")) - - def _get_token(self, client_id, client_secret): - response = self._request( - "POST", - "https://login.pinecone.io/oauth/token", - body_dict={ - "grant_type": "client_credentials", - "client_id": client_id, - "client_secret": client_secret, - "audience": "https://api.pinecone.io/", - }, - ) - - token = response["access_token"] - mask(token) - return token - - def create_project(self, project_name, max_pods): - response = self._request( - "POST", - "https://api.pinecone.io/admin/projects", - body_dict={"name": project_name, "max_pods": max_pods}, - ) - return response - - def create_api_key(self, project_id): - response = self._request( - "POST", - f"https://api.pinecone.io/admin/projects/{project_id}/api-keys", - body_dict={"name": "ci-key"}, - ) - return response - - def list_projects(self): - response = self._request("GET", "https://api.pinecone.io/admin/projects") - return response - - def describe_project(self, project_id): - response = self._request("GET", f"https://api.pinecone.io/admin/projects/{project_id}") - return response - - def delete_project(self, project_id): - response = self._request("DELETE", f"https://api.pinecone.io/admin/projects/{project_id}") - return response - - def get_project_id(self, project_name): - project_list = self.list_projects()["data"] - for project in 
project_list: - if project["name"] == project_name: - return project["id"] - return None - - def main(): client_id = os.getenv("PINECONE_SERVICE_ACCOUNT_CLIENT_ID") client_secret = os.getenv("PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET") @@ -121,12 +35,12 @@ def main(): if client_secret is None: raise Exception("PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET must be set") - admin_api = AdminAPI(client_id, client_secret) + admin_api = Admin(client_id, client_secret) project_name = generate_project_name() max_pods = int(os.getenv("MAX_PODS", 1)) - project_id = admin_api.create_project(project_name, max_pods)["id"] - project_api_key = admin_api.create_api_key(project_id)["value"] + project = admin_api.project.create(name=project_name, max_pods=max_pods) + project_api_key = admin_api.api_key.create(project_id=project.id).value mask(project_api_key) output_file = os.environ.get("GITHUB_OUTPUT", None) @@ -135,7 +49,7 @@ def main(): else: with open(output_file, "a") as f: f.write(f"project_name={project_name}\n") - f.write(f"project_id={project_id}\n") + f.write(f"project_id={project.id}\n") f.write(f"project_api_key={project_api_key}\n") diff --git a/.github/actions/project-delete/action.yml b/.github/actions/project-delete/action.yml index 4dc297e2..6f6bb6d6 100644 --- a/.github/actions/project-delete/action.yml +++ b/.github/actions/project-delete/action.yml @@ -42,13 +42,6 @@ runs: encrypted_secret: ${{ inputs.encrypted_project_api_key }} encryption_key: ${{ inputs.FERNET_ENCRYPTION_KEY }} - - name: Delete project resources - id: delete-project-resources - shell: bash - run: python3 ./.github/actions/project-delete/delete-resources.py - env: - PINECONE_API_KEY: ${{ steps.decrypt-project-api-key.outputs.decrypted_secret }} - - name: Delete project id: delete-project shell: bash diff --git a/.github/actions/project-delete/delete-project.py b/.github/actions/project-delete/delete-project.py index edc28434..8897e1d2 100644 --- a/.github/actions/project-delete/delete-project.py +++ 
b/.github/actions/project-delete/delete-project.py @@ -1,7 +1,8 @@ -import json import os -import urllib3 import logging +import time +from pinecone import Admin +from pinecone.admin.project_eraser import _ProjectEraser logging.basicConfig( level=logging.DEBUG, format="%(levelname)-8s | %(name)s:%(lineno)d | %(message)s" @@ -14,61 +15,6 @@ def mask(value): print(f"::add-mask::{value}") -class AdminAPI: - def __init__(self, client_id, client_secret): - self.http = urllib3.PoolManager() - self.token = None - self.token = self._get_token(client_id, client_secret) - - def _request(self, method, url, headers={}, body_dict=None): - logger.info(f"Requesting {method} {url} with body {body_dict}") - api_version = os.environ.get("API_VERSION", "2025-04") - default_headers = { - "X-Pinecone-Api-Version": api_version, - "Content-Type": "application/json", - "Accept": "application/json", - } - if self.token is not None: - default_headers["Authorization"] = f"Bearer {self.token}" - headers = {**default_headers, **headers} - - args = {"method": method, "url": url, "headers": headers} - if body_dict is not None: - args["body"] = json.dumps(body_dict) - response = self.http.request(**args) - - logger.info(f"Response Status: {response.status}") - if response.status >= 400: - raise Exception( - f"Request failed with status {response.status}: {response.data.decode('utf-8')}" - ) - - if response is None or response.data is None or response.data == b"": - return None - - return json.loads(response.data.decode("utf-8")) - - def _get_token(self, client_id, client_secret): - response = self._request( - "POST", - "https://login.pinecone.io/oauth/token", - body_dict={ - "grant_type": "client_credentials", - "client_id": client_id, - "client_secret": client_secret, - "audience": "https://api.pinecone.io/", - }, - ) - - token = response["access_token"] - mask(token) - return token - - def delete_project(self, project_id): - response = self._request("DELETE", 
f"https://api.pinecone.io/admin/projects/{project_id}") - return response - - client_id = os.getenv("PINECONE_SERVICE_ACCOUNT_CLIENT_ID") client_secret = os.getenv("PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET") @@ -77,10 +23,28 @@ def delete_project(self, project_id): if client_secret is None: raise Exception("PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET must be set") -admin_api = AdminAPI(client_id, client_secret) +admin = Admin(client_id=client_id, client_secret=client_secret) project_id = os.getenv("PROJECT_ID") if project_id is None: raise Exception("PROJECT_ID must be set") -admin_api.delete_project(project_id) +key_response = admin.api_keys.create(project_id=project_id, name="ci-cleanup") + +eraser = _ProjectEraser(api_key=key_response.value) + +done = False +retries = 5 +while not done and retries > 0: + try: + eraser.delete_all_indexes(force_delete=True) + eraser.delete_all_collections() + eraser.delete_all_backups() + done = True + except Exception as e: + logger.error(f"Error deleting project resources: {e}") + time.sleep(10) + retries -= 1 + +admin.api_keys.delete(api_key_id=key_response.key.id) +admin.projects.delete(project_id=project_id) diff --git a/.github/workflows/testing-integration.yaml b/.github/workflows/testing-integration.yaml index 189f5dd9..46d773c2 100644 --- a/.github/workflows/testing-integration.yaml +++ b/.github/workflows/testing-integration.yaml @@ -96,3 +96,25 @@ jobs: encryption_key: '${{ secrets.FERNET_ENCRYPTION_KEY }}' test_suite: '${{ matrix.test_suite }}' use_grpc: 'true' + + admin: + name: admin ${{ matrix.python_version }} + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python_version: ${{ fromJson(inputs.python_versions_json) }} + test_suite: + - admin + steps: + - uses: actions/checkout@v4 + - name: Setup Poetry + uses: ./.github/actions/setup-poetry + with: + include_grpc: false + include_asyncio: false + python_version: '${{ matrix.python_version }}' + - run: poetry run pytest tests/integration/${{ 
matrix.test_suite }} --retries 2 --retry-delay 35 -s -vv --log-cli-level=DEBUG + env: + PINECONE_CLIENT_ID: ${{ secrets.PINECONE_SERVICE_ACCOUNT_CLIENT_ID }} + PINECONE_CLIENT_SECRET: ${{ secrets.PINECONE_SERVICE_ACCOUNT_CLIENT_SECRET }} diff --git a/codegen/build-oas.sh b/codegen/build-oas.sh index d4f83784..5976d7b9 100755 --- a/codegen/build-oas.sh +++ b/codegen/build-oas.sh @@ -6,7 +6,7 @@ version=$1 # e.g. 2025-01 destination="pinecone/core/openapi" -modules=("db_control" "db_data" "inference") +modules=("db_control" "db_data" "inference" "oauth" "admin") py_module_name="core" template_dir="codegen/python-oas-templates/templates5.2.0" diff --git a/docs/admin.rst b/docs/admin.rst new file mode 100644 index 00000000..c2edf1d1 --- /dev/null +++ b/docs/admin.rst @@ -0,0 +1,23 @@ +================ +Admin API +================ + +The Admin API is used to manage your Pinecone organization and projects. + +You will need to create a `Pinecone service account `_ +to use the Pinecone Admin API. + +.. autoclass:: pinecone.Admin + :members: + +Projects +-------- + +.. autoclass:: pinecone.admin.resources.ProjectResource + :members: + +API Keys +-------- + +.. 
autoclass:: pinecone.admin.resources.ApiKeyResource + :members: diff --git a/docs/conf.py b/docs/conf.py index d20af8b4..eceab98d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -16,12 +16,13 @@ "sphinx.ext.napoleon", "sphinx.ext.coverage", "sphinx.ext.autodoc.typehints", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", "myst_parser", ] # -- HTML Configuration ------------------------------------------------- -html_theme = "alabaster" html_theme_options = { "logo": "pinecone-logo.svg", "description": "Pinecone Python SDK", @@ -40,3 +41,7 @@ "Pinecone Console": "https://app.pinecone.io", }, } + +# Add support for code block highlighting +highlight_language = "python" +pygments_style = "sphinx" diff --git a/docs/index.rst b/docs/index.rst index f64573b3..97062489 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -6,6 +6,7 @@ rest asyncio grpc + admin .. toctree:: :maxdepth: 5 diff --git a/pinecone/__init__.py b/pinecone/__init__.py index 2e55fe84..0d387bb4 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -6,6 +6,7 @@ from .deprecation_warnings import * from .pinecone import Pinecone from .pinecone_asyncio import PineconeAsyncio +from .admin import Admin from .exceptions import ( PineconeException, PineconeApiTypeError, @@ -140,6 +141,7 @@ # Primary client classes "Pinecone", "PineconeAsyncio", + "Admin", # All lazy-loaded types *list(_LAZY_IMPORTS.keys()), # Exception classes diff --git a/pinecone/admin/__init__.py b/pinecone/admin/__init__.py new file mode 100644 index 00000000..a8164800 --- /dev/null +++ b/pinecone/admin/__init__.py @@ -0,0 +1,3 @@ +from .admin import Admin + +__all__ = ["Admin"] diff --git a/pinecone/admin/admin.py b/pinecone/admin/admin.py new file mode 100644 index 00000000..5276f018 --- /dev/null +++ b/pinecone/admin/admin.py @@ -0,0 +1,229 @@ +from pinecone.config import OpenApiConfiguration +from pinecone.openapi_support import ApiClient +from pinecone.core.openapi.oauth.apis import OAuthApi +from 
pinecone.core.openapi.oauth.models import TokenRequest +from typing import Optional, Dict +from pinecone.utils import get_user_agent +from pinecone.config import Config +import os +from copy import deepcopy + + +class Admin: + """ + A class for accessing the Pinecone Admin API. + + A prerequisite for using this class is to have a `service account `_. To create a service + account, visit the `Pinecone web console `_ and navigate to + the ``Access > Service Accounts`` section. + + After creating a service account, you will be provided with a client ID and secret. + These values can be passed to the Admin constructor or set the ``PINECONE_CLIENT_ID`` + and ``PINECONE_CLIENT_SECRET`` environment variables. + + + :param client_id: The client ID for the Pinecone API. To obtain a client ID and secret, + you must create a service account via the Pinecone web console. This value can be + passed using keyword arguments or set the ``PINECONE_CLIENT_ID`` environment variable. + :type client_id: Optional[str] + :param client_secret: The client secret for the Pinecone API. To obtain a client ID + and secret, you must create a service account via the Pinecone web console. This value + can be passed using keyword arguments or set the ``PINECONE_CLIENT_SECRET`` environment + variable. + :type client_secret: Optional[str] + :param additional_headers: Additional headers to use for the Pinecone API. This is a + dictionary of key-value pairs. This is primarily used for internal testing + purposes. + :type additional_headers: Optional[Dict[str, str]] + """ + + def __init__( + self, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + additional_headers: Optional[Dict[str, str]] = None, + ): + """ + Initialize the ``Admin`` class. + + :param client_id: The client ID for the Pinecone API. To obtain a client ID and secret, + you must create a service account via the Pinecone web console. 
This value can be + passed using keyword arguments or set the ``PINECONE_CLIENT_ID`` environment variable. + :type client_id: Optional[str] + :param client_secret: The client secret for the Pinecone API. To obtain a client ID + and secret, you must create a service account via the Pinecone web console. This value + can be passed using keyword arguments or set the ``PINECONE_CLIENT_SECRET`` environment + variable. + :type client_secret: Optional[str] + :param additional_headers: Additional headers to use for the Pinecone API. This is a + dictionary of key-value pairs. This is primarily used for internal testing + purposes. + :type additional_headers: Optional[Dict[str, str]] + """ + + if client_id is not None: + self._client_id = client_id + elif os.getenv("PINECONE_CLIENT_ID") is not None: + self._client_id = os.getenv("PINECONE_CLIENT_ID") + else: + raise ValueError( + "client_id is not set. Pass client_id to the Admin constructor or set the PINECONE_CLIENT_ID environment variable." + ) + if client_secret is not None: + self._client_secret = client_secret + elif os.getenv("PINECONE_CLIENT_SECRET") is not None: + self._client_secret = os.getenv("PINECONE_CLIENT_SECRET") + else: + raise ValueError( + "client_secret is not set. Pass client_secret to the Admin constructor or set the PINECONE_CLIENT_SECRET environment variable." 
+ ) + if additional_headers is None: + additional_headers = {} + + _oauth_api_config = OpenApiConfiguration(host="https://login.pinecone.io") + + _oauth_api_client = ApiClient(configuration=_oauth_api_config) + _oauth_api_client.set_default_header("X-Pinecone-Api-Version", "2025-04") + for key, value in additional_headers.items(): + _oauth_api_client.set_default_header(key, value) + _oauth_api_client.user_agent = get_user_agent(Config()) + + _oauth_api = OAuthApi(_oauth_api_client) + token_request = TokenRequest( + client_id=self._client_id, + client_secret=self._client_secret, + grant_type="client_credentials", + audience="https://api.pinecone.io/", + ) + token_response = _oauth_api.get_token(token_request) + self._token = token_response.access_token + + _child_api_config = deepcopy(_oauth_api_config) + _child_api_config.host = "https://api.pinecone.io" + _child_api_config.api_key_prefix = {"BearerAuth": "Bearer"} + _child_api_config.api_key = {"BearerAuth": self._token} + + self._child_api_client = ApiClient(configuration=_child_api_config) + self._child_api_client.set_default_header("X-Pinecone-Api-Version", "2025-04") + for key, value in additional_headers.items(): + self._child_api_client.set_default_header(key, value) + self._child_api_client.user_agent = get_user_agent(Config()) + + # Lazily initialize resources + self._project = None + self._api_key = None + + @property + def project(self): + """A namespace for project-related operations + + Alias for :func:`projects`. + + To learn about all project-related operations, see :func:`pinecone.admin.resources.ProjectResource`. + + Examples + -------- + + .. code-block:: python + :caption: Creating a project + + from pinecone import Admin + + # Using environment variables to pass PINECONE_CLIENT_ID and PINECONE_CLIENT_SECRET + admin = Admin() + + # Create a project with no quota for pod indexes + admin.project.create( + name="my-project", + max_pods=0 + ) + + .. 
code-block:: python + :caption: Listing all projects + + from pinecone import Admin + + admin = Admin() + admin.projects.list() + + .. code-block:: python + :caption: Deleting a project + + from pinecone import Admin + + admin = Admin() + project = admin.project.get(name="my-project") + admin.project.delete(project_id=project.id) + """ + if self._project is None: + from pinecone.admin.resources import ProjectResource + + self._project = ProjectResource(self._child_api_client) + return self._project + + @property + def projects(self): + """Alias for :func:`project`""" + return self.project + + @property + def api_key(self): + """A namespace for api key-related operations + + Alias for :func:`api_keys`. + + To learn about all api key-related operations, see :func:`pinecone.admin.resources.ApiKeyResource`. + + Examples + -------- + + .. code-block:: python + :caption: Creating an API key + + from pinecone import Admin + + admin = Admin() + + project = admin.project.get(name="my-project") + + admin.api_key.create( + name="my-api-key", + project_id=project.id, + description="my-api-key-description", + roles=["ProjectEditor"] + ) + + .. code-block:: python + :caption: Listing all API keys for a project + + from pinecone import Admin + + admin = Admin() + project = admin.project.get(name="my-project") + admin.api_key.list(project_id=project.id) + + .. 
code-block:: python + :caption: Deleting an API key + + from pinecone import Admin + + admin = Admin() + project = admin.project.get(name="my-project") + + # List api keys for the project + keys_list = admin.api_key.list(project_id=project.id) + + # Delete the first api key in the list + admin.api_key.delete(api_key_id=keys_list.data[0].id) + + """ + if self._api_key is None: + from pinecone.admin.resources import ApiKeyResource + + self._api_key = ApiKeyResource(self._child_api_client) + return self._api_key + + @property + def api_keys(self): + """Alias for :func:`api_key`""" + return self.api_key diff --git a/.github/actions/project-delete/delete-resources.py b/pinecone/admin/project_eraser.py similarity index 78% rename from .github/actions/project-delete/delete-resources.py rename to pinecone/admin/project_eraser.py index 3544c68f..cb4473d0 100644 --- a/.github/actions/project-delete/delete-resources.py +++ b/pinecone/admin/project_eraser.py @@ -2,15 +2,11 @@ from pinecone import Pinecone, NotFoundException import time from collections import deque -import os -logging.basicConfig( - level=logging.DEBUG, format="%(levelname)-8s | %(name)s:%(lineno)4d | %(message)s" -) logger = logging.getLogger(__name__) -class RetryCounter: +class _RetryCounter: def __init__(self, max_retries): self.max_retries = max_retries self.counts = {} @@ -27,17 +23,22 @@ def is_maxed_out(self, key): return self.get_count(key) >= self.max_retries -class ProjectEraser: - def __init__(self, api_key): +class _ProjectEraser: + """ + This class is used to delete all resources within a project + """ + + def __init__(self, api_key, max_retries=5, sleep_interval=0.5): self.pc = Pinecone(api_key=api_key) # In situations where there are a lot of resources, we want to # slow down the rate of requests just to avoid any concerns about # rate limits - self.sleep_interval = 5 + self.sleep_interval = sleep_interval self.undeleteable_resources = [] + self.max_retries = max_retries - def pluralize(self, 
resource_name): + def _pluralize(self, resource_name): if resource_name.lower() == "index": return resource_name + "es" else: @@ -46,19 +47,19 @@ def pluralize(self, resource_name): def _delete_all_of_resource(self, resource_name, list_func, delete_func, get_state_func): resources_to_delete = deque(list_func()) if len(resources_to_delete) == 0: - logger.info(f"No {self.pluralize(resource_name)} to delete") + logger.info(f"No {self._pluralize(resource_name)} to delete") return - state_check_retries = RetryCounter(3) - failed_delete_retries = RetryCounter(3) - is_deletable_retries = RetryCounter(3) - is_terminating_retries = RetryCounter(10) + state_check_retries = _RetryCounter(self.max_retries) + failed_delete_retries = _RetryCounter(self.max_retries) + is_deletable_retries = _RetryCounter(self.max_retries) + is_terminating_retries = _RetryCounter(self.max_retries * 3) undeletable_resources = [] while len(resources_to_delete) > 0: logger.info( - f"There are {len(resources_to_delete)} {self.pluralize(resource_name)} left to delete" + f"There are {len(resources_to_delete)} {self._pluralize(resource_name)} left to delete" ) time.sleep(self.sleep_interval) @@ -165,7 +166,7 @@ def _delete_all_of_resource(self, resource_name, list_func, delete_func, get_sta if len(undeletable_resources) > 0: logger.error( - f"There were {len(undeletable_resources)} {self.pluralize(resource_name)} that were not deleted" + f"There were {len(undeletable_resources)} {self._pluralize(resource_name)} that were not deleted" ) for item in undeletable_resources: logger.error( @@ -173,9 +174,9 @@ def _delete_all_of_resource(self, resource_name, list_func, delete_func, get_sta ) self.undeleteable_resources.append(item) else: - logger.info(f"All {self.pluralize(resource_name)} were deleted successfully") + logger.info(f"All {self._pluralize(resource_name)} were deleted successfully") - def delete_all_indexes(self): + def delete_all_indexes(self, force_delete=False): index_list = 
self.pc.db.index.list() if len(index_list) == 0: logger.info("No indexes to delete") @@ -184,6 +185,14 @@ def delete_all_indexes(self): index_with_deletion_protection = [ index for index in index_list if index.deletion_protection == "enabled" ] + if not force_delete and len(index_with_deletion_protection) > 0: + logger.info( + "There are indexes with deletion protection enabled. You must disable deletion protection before the index can be deleted." + ) + raise Exception( + f"Indexes with deletion protection enabled cannot be deleted: {[i.name for i in index_with_deletion_protection]}" + ) + for index in index_with_deletion_protection: logger.info(f"Disabling deletion protection for Index {index.name}") time.sleep(self.sleep_interval) @@ -244,58 +253,11 @@ def get_state_func(name): get_state_func=get_state_func, ) - def _cleanup_all(self): - self.undeleteable_resources = [] - self.delete_all_backups() - self.delete_all_collections() - self.delete_all_indexes() - - def cleanup_all(self): - self._cleanup_all() - - if len(self.undeleteable_resources) > 0: - logger.info( - f"There were {len(self.undeleteable_resources)} undeleteable resources, retrying in 60 seconds" - ) - time.sleep(60) - self._cleanup_all() - + def retry_needed(self): if len(self.undeleteable_resources) > 0: logger.info( - f"There were {len(self.undeleteable_resources)} undeleteable resources, retrying in 120 seconds" + f"Retry needed for {len(self.undeleteable_resources)} undeleteable resources" ) - time.sleep(120) - self._cleanup_all() - - if len(self.undeleteable_resources) > 0: - logger.info( - f"There were {len(self.undeleteable_resources)} undeleteable resources, retrying in 240 seconds" - ) - time.sleep(240) - self._cleanup_all() - - if len(self.undeleteable_resources) > 0: - logger.info( - f"There were {len(self.undeleteable_resources)} undeleteable resources, retrying in 240 seconds" - ) - time.sleep(240) - self._cleanup_all() - - if len(self.undeleteable_resources) > 0: - logger.error( - 
f"There were {len(self.undeleteable_resources)} undeleteable resources, giving up" - ) - raise Exception( - f"There were {len(self.undeleteable_resources)} undeleteable resources, giving up" - ) - - -if __name__ == "__main__": - from pinecone import __version__ - - api_key = os.getenv("PINECONE_API_KEY") - if api_key is None: - raise Exception("PINECONE_API_KEY must be set") - - logger.info(f"Pinecone version: {__version__}") - ProjectEraser(api_key).cleanup_all() + return True + else: + return False diff --git a/pinecone/admin/resources/__init__.py b/pinecone/admin/resources/__init__.py new file mode 100644 index 00000000..d8b653f8 --- /dev/null +++ b/pinecone/admin/resources/__init__.py @@ -0,0 +1,4 @@ +from .project import ProjectResource +from .api_key import ApiKeyResource + +__all__ = ["ProjectResource", "ApiKeyResource"] diff --git a/pinecone/admin/resources/api_key.py b/pinecone/admin/resources/api_key.py new file mode 100644 index 00000000..2db36d4c --- /dev/null +++ b/pinecone/admin/resources/api_key.py @@ -0,0 +1,210 @@ +from typing import Optional, List +from pinecone.openapi_support import ApiClient +from pinecone.core.openapi.admin.apis import APIKeysApi +from pinecone.utils import require_kwargs, parse_non_empty_args +from pinecone.core.openapi.admin.models import CreateAPIKeyRequest + + +class ApiKeyResource: + """ + This class is used to create, delete, list, and fetch API keys. + + .. note:: + The class should not be instantiated directly. Instead, access this classes + methods through the :class:`pinecone.Admin` class's + :attr:`api_key` or :attr:`api_keys` attributes. + + .. 
code-block:: python + + from pinecone import Admin + + admin = Admin() + + project = admin.project.get(name='my-project-name') + api_keys = admin.api_keys.list(project_id=project.id) + """ + + def __init__(self, api_client: ApiClient): + self._api_keys_api = APIKeysApi(api_client=api_client) + + @require_kwargs + def list(self, project_id: str): + """ + List all API keys for a project. + + To find the ``project_id`` for your project, use + :func:`pinecone.admin.resources.ProjectResource.list` + or :func:`pinecone.admin.resources.ProjectResource.get`. + + The value of the API key is not returned. The value is only returned + when a new API key is being created. + + :param project_id: The project_id of the project to list API keys for. + :type project_id: str + :return: An object with a list of API keys. + :rtype: {"data": [APIKey]} + + Examples + -------- + + .. code-block:: python + :caption: List all API keys for a project + :emphasize-lines: 9 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project = admin.project.get(name='my-project-name') + + api_keys = admin.api_key.list(project_id=project.id) + for api_key in api_keys.data: + print(api_key.id) + print(api_key.name) + print(api_key.description) + print(api_key.roles) + """ + return self._api_keys_api.list_api_keys(project_id=project_id) + + @require_kwargs + def fetch(self, api_key_id: str): + """ + Fetch an API key by ``api_key_id``. + + The value of the API key is not returned. The value is only returned + when a new API key is being created. + + :param api_key_id: The id of the API key to fetch. + :type api_key_id: str + :return: The API key. + :rtype: APIKey + + Examples + -------- + + .. 
code-block:: python + :caption: Fetch an API key by ``api_key_id`` + :emphasize-lines: 7 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + api_key = admin.api_key.fetch(api_key_id='my-api-key-id') + print(api_key.id) + print(api_key.name) + print(api_key.description) + print(api_key.roles) + print(api_key.created_at) + + """ + return self._api_keys_api.fetch_api_key(api_key_id=api_key_id) + + @require_kwargs + def get(self, api_key_id: str): + """Alias for :func:`fetch`""" + return self.fetch(api_key_id=api_key_id) + + @require_kwargs + def describe(self, api_key_id: str): + """Alias for :func:`fetch`""" + return self.fetch(api_key_id=api_key_id) + + @require_kwargs + def delete(self, api_key_id: str): + """ + Delete an API key by ``api_key_id``. + + :param api_key_id: The id of the API key to delete. + :type api_key_id: str + :return: ``None`` + + Examples + -------- + + .. code-block:: python + :caption: Delete an API key by api_key_id + :emphasize-lines: 7 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + admin.api_key.delete(api_key_id='my-api-key-id') + + try: + admin.api_key.fetch(api_key_id='my-api-key-id') + except NotFoundException: + print("API key deleted successfully") + + """ + return self._api_keys_api.delete_api_key(api_key_id=api_key_id) + + @require_kwargs + def create( + self, + project_id: str, + name: str, + description: Optional[str] = None, + roles: Optional[List[str]] = None, + ): + """ + Create an API key for a project. + + The value of the API key is returned in the create response. + This is the only time the value is returned. + + :param project_id: The project_id of the project to create the API key for. + :type project_id: str + :param name: The name of the API key. 
+ :type name: str + :param description: The description of the API key. + :type description: Optional[str] + :param roles: The roles of the API key. Available roles include: + ``ProjectEditor``, ``ProjectViewer``, ``ControlPlaneEditor``, + ``ControlPlaneViewer``, ``DataPlaneEditor``, ``DataPlaneViewer`` + :type roles: Optional[List[str]] + :return: The created API key object and value. + :rtype: {"key": APIKey, "value": str} + + Examples + -------- + + .. code-block:: python + :caption: Create an API key for a project + :emphasize-lines: 9-14 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project = admin.project.get(name='my-project-name') + + api_key_response = admin.api_key.create( + project_id=project.id, + name='ci-key', + description='Key for CI testing', + roles=['ProjectEditor'] + ) + api_key = api_key_response.key + print(api_key.id) + print(api_key.name) + print(api_key.description) + print(api_key.roles) + + api_key_value = api_key_response.value + print(api_key_value) + """ + args = [("name", name), ("description", description), ("roles", roles)] + create_api_key_request = CreateAPIKeyRequest(**parse_non_empty_args(args)) + return self._api_keys_api.create_api_key( + project_id=project_id, create_api_key_request=create_api_key_request + ) diff --git a/pinecone/admin/resources/project.py b/pinecone/admin/resources/project.py new file mode 100644 index 00000000..cc7c8ca7 --- /dev/null +++ b/pinecone/admin/resources/project.py @@ -0,0 +1,502 @@ +from typing import Optional +from pinecone.exceptions import NotFoundException, PineconeException +from pinecone.openapi_support import ApiClient +from pinecone.core.openapi.admin.apis import ProjectsApi +from pinecone.utils import parse_non_empty_args, require_kwargs +from pinecone.core.openapi.admin.models import CreateProjectRequest, UpdateProjectRequest +import logging +import time + +logger = 
logging.getLogger(__name__) + + +class ProjectResource: + """ + This class is used to create, delete, list, fetch, and update projects. + + .. note:: + The class should not be instantiated directly. Instead, access this classes + methods through the :class:`pinecone.Admin` class's + :attr:`project` or :attr:`projects` attributes. + + .. code-block:: python + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + # Now call project methods on the projects namespace + project = admin.projects.create( + name="my-project", + max_pods=10, + force_encryption_with_cmek=False + ) + """ + + def __init__(self, api_client: ApiClient): + """ + Initialize the ProjectResource. + + .. warning:: + This class should not be instantiated directly. Instead, access this classes + methods through the :class:`pinecone.Admin` class's + :attr:`project` or :attr:`projects` attributes. + + :param api_client: The API client to use. + :type api_client: ApiClient + """ + self._projects_api = ProjectsApi(api_client=api_client) + self._api_client = api_client + + @require_kwargs + def list(self): + """ + List all projects in the organization. + + :return: An object with a list of projects. + :rtype: {"data": [Project]} + + .. code-block:: python + :caption: List all projects in the organization + :emphasize-lines: 8 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + # List all projects in the organization + projects_response = admin.projects.list() + for project in projects_response.data: + print(project.id) + print(project.name) + print(project.max_pods) + print(project.force_encryption_with_cmek) + """ + return self._projects_api.list_projects() + + @require_kwargs + def fetch(self, project_id: Optional[str] = None, name: Optional[str] = None): + """ + Fetch a project by project_id or name. 
+ + :param project_id: The project_id of the project to fetch. + :type project_id: str + :param name: The name of the project to fetch. + :type name: str + :return: The project. + :rtype: Project + + Examples + -------- + + .. code-block:: python + :caption: Fetch a project by project_id + :emphasize-lines: 7-9 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project = admin.projects.fetch( + project_id="42ca341d-43bf-47cb-9f27-e645dbfabea6" + ) + print(project.id) + print(project.name) + print(project.max_pods) + print(project.force_encryption_with_cmek) + print(project.organization_id) + print(project.created_at) + + .. code-block:: python + :caption: Fetch a project by name + :emphasize-lines: 7 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project = admin.projects.fetch(name="my-project-name") + print(project.id) + print(project.name) + print(project.max_pods) + print(project.force_encryption_with_cmek) + print(project.organization_id) + print(project.created_at) + """ + if project_id is not None and name is not None: + raise ValueError("Either project_id or name must be provided but not both") + elif project_id is None and name is None: + raise ValueError("Either project_id or name must be provided") + + if project_id is not None: + return self._projects_api.fetch_project(project_id=project_id) + else: + projects = self.list().data + projects = [project for project in projects if project.name == name] + if len(projects) == 0: + raise NotFoundException(f"Project with name '{name}' not found") + elif len(projects) > 1: + ids = [project.id for project in projects] + raise PineconeException( + f"Multiple projects found with name '{name}'. Please use project_id to fetch a specific project. 
Matching project ids: {ids}" + ) + else: + return projects[0] + + @require_kwargs + def get(self, project_id: Optional[str] = None, name: Optional[str] = None): + """Alias for :func:`fetch` + + Examples + -------- + + .. code-block:: python + :caption: Get a project by project_id + :emphasize-lines: 7-9 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project = admin.project.get( + project_id="42ca341d-43bf-47cb-9f27-e645dbfabea6" + ) + print(project.id) + print(project.name) + print(project.max_pods) + print(project.force_encryption_with_cmek) + """ + return self.fetch(project_id=project_id, name=name) + + @require_kwargs + def describe(self, project_id: Optional[str] = None, name: Optional[str] = None): + """Alias for :func:`fetch` + + Examples + -------- + + .. code-block:: python + :caption: Describe a project by project_id + :emphasize-lines: 7-9 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project = admin.project.describe( + project_id="42ca341d-43bf-47cb-9f27-e645dbfabea6" + ) + print(project.id) + print(project.name) + print(project.max_pods) + print(project.force_encryption_with_cmek) + """ + return self.fetch(project_id=project_id, name=name) + + @require_kwargs + def exists(self, project_id: Optional[str] = None, name: Optional[str] = None): + """ + Check if a project exists by project_id or name. + + :param project_id: The project_id of the project to check. + :type project_id: str + :param name: The name of the project to check. + :type name: str + :return: True if the project exists, False otherwise. + :rtype: bool + + :raises ValueError: If both project_id and name are provided. + + Examples + -------- + + .. 
code-block:: python + :caption: Check if a project exists by project name + :emphasize-lines: 8 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project_name = "my-project-name" + if admin.project.exists(name=project_name): + print(f"Project {project_name} exists") + else: + admin.project.create( + name=project_name, + max_pods=10, + force_encryption_with_cmek=False + ) + + .. code-block:: python + :caption: Check if a project exists by project_id + :emphasize-lines: 8 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project_id = "42ca341d-43bf-47cb-9f27-e645dbfabea6" + if admin.project.exists(project_id=project_id): + print(f"Project {project_id} exists") + else: + print(f"Project {project_id} does not exist") + """ + if project_id is not None and name is not None: + raise ValueError("Either project_id or name must be provided but not both") + elif project_id is None and name is None: + raise ValueError("Either project_id or name must be provided") + + try: + args = [("project_id", project_id), ("name", name)] + self.fetch(**parse_non_empty_args(args)) + return True + except NotFoundException: + return False + + @require_kwargs + def create( + self, + name: str, + max_pods: Optional[int] = None, + force_encryption_with_cmek: Optional[bool] = None, + ): + """ + Create a project. + + :param name: The name of the project to create. + :type name: str + :param max_pods: The maximum number of pods for the project. + :type max_pods: int + :param force_encryption_with_cmek: Whether to force encryption with CMEK. + :type force_encryption_with_cmek: bool + :return: The created project. + :rtype: Project + + Examples + -------- + + .. 
code-block:: python + :caption: Create a project + :emphasize-lines: 7-11 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project = admin.project.create( + name="my-project-name", + max_pods=10, + force_encryption_with_cmek=False + ) + + print(project.id) + print(project.name) + print(project.organization_id) + print(project.max_pods) + print(project.force_encryption_with_cmek) + print(project.created_at) + + """ + args = [ + ("name", name), + ("max_pods", max_pods), + ("force_encryption_with_cmek", force_encryption_with_cmek), + ] + create_request = CreateProjectRequest(**parse_non_empty_args(args)) + return self._projects_api.create_project(create_project_request=create_request) + + @require_kwargs + def update( + self, + project_id: str, + name: Optional[str] = None, + max_pods: Optional[int] = None, + force_encryption_with_cmek: Optional[bool] = None, + ): + """ + Update a project. + + :param project_id: The project_id of the project to update. + :type project_id: str + :param name: The name of the project to update. + :type name: str + :param max_pods: The maximum number of pods for the project. + :type max_pods: int + :param force_encryption_with_cmek: Whether to force encryption with CMEK. + :type force_encryption_with_cmek: bool + :return: The updated project. + :rtype: Project + + Examples + -------- + + .. 
code-block:: python + :caption: Update a project by project_id + :emphasize-lines: 10-13, 16-19 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project = admin.project.get(name='my-project-name') + + # Update max pods to 10 + project = admin.project.update( + project_id=project.id, + max_pods=10 + ) + + # Update force_encryption_with_cmek to True + project = admin.project.update( + project_id=project.id, + force_encryption_with_cmek=True + ) + + """ + args = [ + ("name", name), + ("max_pods", max_pods), + ("force_encryption_with_cmek", force_encryption_with_cmek), + ] + update_request = UpdateProjectRequest(**parse_non_empty_args(args)) + return self._projects_api.update_project( + project_id=project_id, update_project_request=update_request + ) + + @require_kwargs + def delete( + self, + project_id: str, + delete_all_indexes: bool = False, + delete_all_collections: bool = False, + delete_all_backups: bool = False, + ): + """ + + .. warning:: + Deleting a project is a permanent and irreversible operation. + Please be very sure you want to delete the project and everything + associated with it before calling this function. + + + Projects can only be deleted if they are empty. The delete operation + will fail if the project contains any resources such as indexes, + collections, or backups. + + If you pass additional options such as ``delete_all_indexes=True``, + ``delete_all_collections=True``, or ``delete_all_backups=True``, this function + will attempt to delete all of these resources before deleting the project itself. + **These deletions are permanent and cannot be undone.** + + :param project_id: The project_id of the project to delete. + :type project_id: str + :param delete_all_indexes: Attempt to delete all indexes associated with the project. 
+ :type delete_all_indexes: bool + :param delete_all_collections: Attempt to delete all collections associated with the project. + :type delete_all_collections: bool + :param delete_all_backups: Attempt to delete all backups associated with the project. + :type delete_all_backups: bool + :return: ``None`` + + Examples + -------- + + .. code-block:: python + :caption: Delete a project by project_id + :emphasize-lines: 9 + + from pinecone import Admin + + # Credentials read from PINECONE_CLIENT_ID and + # PINECONE_CLIENT_SECRET environment variables + admin = Admin() + + project = admin.project.get(name='my-project-name') + + admin.project.delete(project_id=project.id) + + .. code-block:: python + :caption: Delete a project that still contains indexes, collections, and backups + :emphasize-lines: 7-12 + + from pinecone import Admin + + admin = Admin() + + project = admin.project.get(name='my-project-name') + + admin.project.delete( + project_id=project.id, + delete_all_indexes=True, + delete_all_collections=True, + delete_all_backups=True + ) + + if not admin.project.exists(project_id=project.id): + print("Project deleted successfully") + else: + print("Project deletion failed") + """ + project = self.get(project_id=project_id) + + if not (delete_all_indexes or delete_all_collections or delete_all_backups): + return self._projects_api.delete_project(project_id=project_id) + + from .api_key import ApiKeyResource + + api_key_resource = ApiKeyResource(self._api_client) + logger.info(f"Creating API key 'cleanup-key' for project {project.id}") + key_create_response = api_key_resource.create( + project_id=project.id, name="cleanup-key", roles=["ProjectEditor"] + ) + api_key = key_create_response.value + + try: + from ..project_eraser import _ProjectEraser + + done = False + retries = 0 + + while not done and retries < 5: + project_eraser = _ProjectEraser(api_key=api_key) + + if delete_all_collections: + project_eraser.delete_all_collections() + if delete_all_backups: + 
project_eraser.delete_all_backups() + if delete_all_indexes: + project_eraser.delete_all_indexes() + + done = not project_eraser.retry_needed() + retries += 1 + if not done: + logger.info( + f"Retrying deletion of resources for project {project.id}. There were {len(project_eraser.undeleteable_resources)} undeleteable resources" + ) + time.sleep(30) + finally: + logger.info(f"Deleting API key 'cleanup-key' for project {project.id}") + api_key_resource.delete(api_key_id=key_create_response.key.id) + + return self._projects_api.delete_project(project_id=project_id) diff --git a/pinecone/config/openapi_configuration.py b/pinecone/config/openapi_configuration.py index c3ce79a4..19610e63 100644 --- a/pinecone/config/openapi_configuration.py +++ b/pinecone/config/openapi_configuration.py @@ -382,6 +382,13 @@ def auth_settings(self): "key": "Api-Key", "value": self.get_api_key_with_prefix("ApiKeyAuth"), } + elif "BearerAuth" in self.api_key: + auth["BearerAuth"] = { + "type": "api_key", + "in": "header", + "key": "Authorization", + "value": self.get_api_key_with_prefix("BearerAuth"), + } return auth def get_host_settings(self): diff --git a/pinecone/core/openapi/admin/__init__.py b/pinecone/core/openapi/admin/__init__.py new file mode 100644 index 00000000..18e8567a --- /dev/null +++ b/pinecone/core/openapi/admin/__init__.py @@ -0,0 +1,30 @@ +# flake8: noqa + +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +__version__ = "1.0.0" + +# import ApiClient +from pinecone.openapi_support.api_client import ApiClient + +# import Configuration +from pinecone.config.openapi_configuration import Configuration + +# import exceptions +from pinecone.openapi_support.exceptions import PineconeException +from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from pinecone.openapi_support.exceptions import PineconeApiTypeError +from pinecone.openapi_support.exceptions import PineconeApiValueError +from pinecone.openapi_support.exceptions import PineconeApiKeyError +from pinecone.openapi_support.exceptions import PineconeApiException + +API_VERSION = "2025-04" diff --git a/pinecone/core/openapi/admin/api/__init__.py b/pinecone/core/openapi/admin/api/__init__.py new file mode 100644 index 00000000..d216833d --- /dev/null +++ b/pinecone/core/openapi/admin/api/__init__.py @@ -0,0 +1,3 @@ +# do not import all apis into this module because that uses a lot of memory and stack frames +# if you need the ability to import all apis from one package, import them with +# from pinecone.core.openapi.admin.apis import APIKeysApi diff --git a/pinecone/core/openapi/admin/api/api_keys_api.py b/pinecone/core/openapi/admin/api/api_keys_api.py new file mode 100644 index 00000000..867b3db8 --- /dev/null +++ b/pinecone/core/openapi/admin/api/api_keys_api.py @@ -0,0 +1,590 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support import ApiClient, AsyncioApiClient +from pinecone.openapi_support.endpoint_utils import ( + ExtraOpenApiKwargsTypedDict, + KwargsWithOpenApiKwargDefaultsTypedDict, +) +from pinecone.openapi_support.endpoint import Endpoint as _Endpoint, ExtraOpenApiKwargsTypedDict +from pinecone.openapi_support.asyncio_endpoint import AsyncioEndpoint as _AsyncioEndpoint +from pinecone.openapi_support.model_utils import ( # noqa: F401 + date, + datetime, + file_type, + none_type, + validate_and_convert_types, +) +from pinecone.core.openapi.admin.model.api_key import APIKey +from pinecone.core.openapi.admin.model.api_key_with_secret import APIKeyWithSecret +from pinecone.core.openapi.admin.model.create_api_key_request import CreateAPIKeyRequest +from pinecone.core.openapi.admin.model.inline_response2001 import InlineResponse2001 +from pinecone.core.openapi.admin.model.inline_response401 import InlineResponse401 + + +class APIKeysApi: + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def __create_api_key( + self, project_id, create_api_key_request, **kwargs: ExtraOpenApiKwargsTypedDict + ): + """Create an API key # noqa: E501 + + Create a new API key for a project. Developers can use the API key to authenticate requests to Pinecone's Data Plane and Control Plane APIs. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_api_key(project_id, create_api_key_request, async_req=True) + >>> result = thread.get() + + Args: + project_id (str): Project ID + create_api_key_request (CreateAPIKeyRequest): The details of the new API key. 
+ + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + APIKeyWithSecret + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["project_id"] = project_id + kwargs["create_api_key_request"] = create_api_key_request + return self.call_with_http_info(**kwargs) + + self.create_api_key = _Endpoint( + settings={ + "response_type": (APIKeyWithSecret,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects/{project_id}/api-keys", + "operation_id": "create_api_key", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["project_id", "create_api_key_request"], + "required": ["project_id", "create_api_key_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "project_id": (str,), + "create_api_key_request": (CreateAPIKeyRequest,), + }, + "attribute_map": {"project_id": "project_id"}, + "location_map": {"project_id": "path", "create_api_key_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_api_key, + ) + + 
def __delete_api_key(self, api_key_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """Delete an API key # noqa: E501 + + Delete an API key from a project. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_api_key(api_key_id, async_req=True) + >>> result = thread.get() + + Args: + api_key_id (str): API key ID + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + None + If the method is called asynchronously, returns the request + thread. 
+ """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["api_key_id"] = api_key_id + return self.call_with_http_info(**kwargs) + + self.delete_api_key = _Endpoint( + settings={ + "response_type": None, + "auth": ["BearerAuth"], + "endpoint_path": "/admin/api-keys/{api_key_id}", + "operation_id": "delete_api_key", + "http_method": "DELETE", + "servers": None, + }, + params_map={ + "all": ["api_key_id"], + "required": ["api_key_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"api_key_id": (str,)}, + "attribute_map": {"api_key_id": "api_key_id"}, + "location_map": {"api_key_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__delete_api_key, + ) + + def __fetch_api_key(self, api_key_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """Get API key details # noqa: E501 + + Get the details of an API key, excluding the API key secret. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.fetch_api_key(api_key_id, async_req=True) + >>> result = thread.get() + + Args: + api_key_id (str): API key ID + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. 
+ _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + APIKey + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["api_key_id"] = api_key_id + return self.call_with_http_info(**kwargs) + + self.fetch_api_key = _Endpoint( + settings={ + "response_type": (APIKey,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/api-keys/{api_key_id}", + "operation_id": "fetch_api_key", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["api_key_id"], + "required": ["api_key_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"api_key_id": (str,)}, + "attribute_map": {"api_key_id": "api_key_id"}, + "location_map": {"api_key_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__fetch_api_key, + ) + + def __list_api_keys(self, project_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """List API keys # noqa: E501 + + List all API keys in a project. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_api_keys(project_id, async_req=True) + >>> result = thread.get() + + Args: + project_id (str): Project ID + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. 
It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + InlineResponse2001 + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["project_id"] = project_id + return self.call_with_http_info(**kwargs) + + self.list_api_keys = _Endpoint( + settings={ + "response_type": (InlineResponse2001,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects/{project_id}/api-keys", + "operation_id": "list_api_keys", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["project_id"], + "required": ["project_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"project_id": (str,)}, + "attribute_map": {"project_id": "project_id"}, + "location_map": {"project_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_api_keys, + ) + + +class AsyncioAPIKeysApi: + """NOTE: This class is @generated using OpenAPI + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = AsyncioApiClient() + self.api_client = api_client + + async def __create_api_key(self, project_id, create_api_key_request, **kwargs): + """Create an API key # noqa: E501 + + Create a new API key for a project. Developers can use the API key to authenticate requests to Pinecone's Data Plane and Control Plane APIs. 
# noqa: E501 + + + Args: + project_id (str): Project ID + create_api_key_request (CreateAPIKeyRequest): The details of the new API key. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + APIKeyWithSecret + """ + self._process_openapi_kwargs(kwargs) + kwargs["project_id"] = project_id + kwargs["create_api_key_request"] = create_api_key_request + return await self.call_with_http_info(**kwargs) + + self.create_api_key = _AsyncioEndpoint( + settings={ + "response_type": (APIKeyWithSecret,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects/{project_id}/api-keys", + "operation_id": "create_api_key", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["project_id", "create_api_key_request"], + "required": ["project_id", "create_api_key_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "project_id": (str,), + "create_api_key_request": (CreateAPIKeyRequest,), + }, + "attribute_map": {"project_id": "project_id"}, + "location_map": {"project_id": "path", "create_api_key_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + 
callable=__create_api_key, + ) + + async def __delete_api_key(self, api_key_id, **kwargs): + """Delete an API key # noqa: E501 + + Delete an API key from a project. # noqa: E501 + + + Args: + api_key_id (str): API key ID + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + None + """ + self._process_openapi_kwargs(kwargs) + kwargs["api_key_id"] = api_key_id + return await self.call_with_http_info(**kwargs) + + self.delete_api_key = _AsyncioEndpoint( + settings={ + "response_type": None, + "auth": ["BearerAuth"], + "endpoint_path": "/admin/api-keys/{api_key_id}", + "operation_id": "delete_api_key", + "http_method": "DELETE", + "servers": None, + }, + params_map={ + "all": ["api_key_id"], + "required": ["api_key_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"api_key_id": (str,)}, + "attribute_map": {"api_key_id": "api_key_id"}, + "location_map": {"api_key_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__delete_api_key, + ) + + async def __fetch_api_key(self, api_key_id, **kwargs): + """Get API key details # noqa: E501 + + Get the details of an API key, excluding the API key secret. 
# noqa: E501 + + + Args: + api_key_id (str): API key ID + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + APIKey + """ + self._process_openapi_kwargs(kwargs) + kwargs["api_key_id"] = api_key_id + return await self.call_with_http_info(**kwargs) + + self.fetch_api_key = _AsyncioEndpoint( + settings={ + "response_type": (APIKey,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/api-keys/{api_key_id}", + "operation_id": "fetch_api_key", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["api_key_id"], + "required": ["api_key_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"api_key_id": (str,)}, + "attribute_map": {"api_key_id": "api_key_id"}, + "location_map": {"api_key_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__fetch_api_key, + ) + + async def __list_api_keys(self, project_id, **kwargs): + """List API keys # noqa: E501 + + List all API keys in a project. # noqa: E501 + + + Args: + project_id (str): Project ID + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. 
+ _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + InlineResponse2001 + """ + self._process_openapi_kwargs(kwargs) + kwargs["project_id"] = project_id + return await self.call_with_http_info(**kwargs) + + self.list_api_keys = _AsyncioEndpoint( + settings={ + "response_type": (InlineResponse2001,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects/{project_id}/api-keys", + "operation_id": "list_api_keys", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["project_id"], + "required": ["project_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"project_id": (str,)}, + "attribute_map": {"project_id": "project_id"}, + "location_map": {"project_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_api_keys, + ) diff --git a/pinecone/core/openapi/admin/api/projects_api.py b/pinecone/core/openapi/admin/api/projects_api.py new file mode 100644 index 00000000..0383d75f --- /dev/null +++ b/pinecone/core/openapi/admin/api/projects_api.py @@ -0,0 +1,703 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support import ApiClient, AsyncioApiClient +from pinecone.openapi_support.endpoint_utils import ( + ExtraOpenApiKwargsTypedDict, + KwargsWithOpenApiKwargDefaultsTypedDict, +) +from pinecone.openapi_support.endpoint import Endpoint as _Endpoint, ExtraOpenApiKwargsTypedDict +from pinecone.openapi_support.asyncio_endpoint import AsyncioEndpoint as _AsyncioEndpoint +from pinecone.openapi_support.model_utils import ( # noqa: F401 + date, + datetime, + file_type, + none_type, + validate_and_convert_types, +) +from pinecone.core.openapi.admin.model.create_project_request import CreateProjectRequest +from pinecone.core.openapi.admin.model.inline_response200 import InlineResponse200 +from pinecone.core.openapi.admin.model.inline_response401 import InlineResponse401 +from pinecone.core.openapi.admin.model.project import Project +from pinecone.core.openapi.admin.model.update_project_request import UpdateProjectRequest + + +class ProjectsApi: + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def __create_project(self, create_project_request, **kwargs: ExtraOpenApiKwargsTypedDict): + """Create a new project # noqa: E501 + + Creates a new project. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.create_project(create_project_request, async_req=True) + >>> result = thread.get() + + Args: + create_project_request (CreateProjectRequest): The details of the new project. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. 
+ _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + Project + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["create_project_request"] = create_project_request + return self.call_with_http_info(**kwargs) + + self.create_project = _Endpoint( + settings={ + "response_type": (Project,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects", + "operation_id": "create_project", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["create_project_request"], + "required": ["create_project_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"create_project_request": (CreateProjectRequest,)}, + "attribute_map": {}, + "location_map": {"create_project_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_project, + ) + + def __delete_project(self, project_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """Delete a project # noqa: E501 + + Delete a project and all its associated configuration. Before deleting a project, you must delete all indexes, assistants, backups, and collections associated with the project. 
Other project resources, such as API keys, are automatically deleted when the project is deleted. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.delete_project(project_id, async_req=True) + >>> result = thread.get() + + Args: + project_id (str): Project ID + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + None + If the method is called asynchronously, returns the request + thread. 
+ """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["project_id"] = project_id + return self.call_with_http_info(**kwargs) + + self.delete_project = _Endpoint( + settings={ + "response_type": None, + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects/{project_id}", + "operation_id": "delete_project", + "http_method": "DELETE", + "servers": None, + }, + params_map={ + "all": ["project_id"], + "required": ["project_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"project_id": (str,)}, + "attribute_map": {"project_id": "project_id"}, + "location_map": {"project_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__delete_project, + ) + + def __fetch_project(self, project_id, **kwargs: ExtraOpenApiKwargsTypedDict): + """Get project details # noqa: E501 + + Get details about a project. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.fetch_project(project_id, async_req=True) + >>> result = thread.get() + + Args: + project_id (str): Project ID + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. 
+ Default is True. + async_req (bool): execute request asynchronously + + Returns: + Project + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["project_id"] = project_id + return self.call_with_http_info(**kwargs) + + self.fetch_project = _Endpoint( + settings={ + "response_type": (Project,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects/{project_id}", + "operation_id": "fetch_project", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["project_id"], + "required": ["project_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"project_id": (str,)}, + "attribute_map": {"project_id": "project_id"}, + "location_map": {"project_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__fetch_project, + ) + + def __list_projects(self, **kwargs: ExtraOpenApiKwargsTypedDict): + """List projects # noqa: E501 + + List all projects in an organization. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.list_projects(async_req=True) + >>> result = thread.get() + + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. 
+ _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + InlineResponse200 + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + return self.call_with_http_info(**kwargs) + + self.list_projects = _Endpoint( + settings={ + "response_type": (InlineResponse200,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects", + "operation_id": "list_projects", + "http_method": "GET", + "servers": None, + }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + "attribute_map": {}, + "location_map": {}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_projects, + ) + + def __update_project( + self, project_id, update_project_request, **kwargs: ExtraOpenApiKwargsTypedDict + ): + """Update a project # noqa: E501 + + Update a project's configuration details. You can update the project's name, maximum number of Pods, or enable encryption with a customer-managed encryption key (CMEK). # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.update_project(project_id, update_project_request, async_req=True) + >>> result = thread.get() + + Args: + project_id (str): Project ID + update_project_request (UpdateProjectRequest): Project details to be updated. Fields that are omitted will not be updated. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. 
+ Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + Project + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["project_id"] = project_id + kwargs["update_project_request"] = update_project_request + return self.call_with_http_info(**kwargs) + + self.update_project = _Endpoint( + settings={ + "response_type": (Project,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects/{project_id}", + "operation_id": "update_project", + "http_method": "PATCH", + "servers": None, + }, + params_map={ + "all": ["project_id", "update_project_request"], + "required": ["project_id", "update_project_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "project_id": (str,), + "update_project_request": (UpdateProjectRequest,), + }, + "attribute_map": {"project_id": "project_id"}, + "location_map": {"project_id": "path", "update_project_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__update_project, + ) + + +class AsyncioProjectsApi: + """NOTE: This class is @generated using OpenAPI + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = AsyncioApiClient() + self.api_client = api_client + + async def __create_project(self, create_project_request, **kwargs): + """Create a new project # noqa: E501 + + Creates a new project. # noqa: E501 + + + Args: + create_project_request (CreateProjectRequest): The details of the new project. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + Project + """ + self._process_openapi_kwargs(kwargs) + kwargs["create_project_request"] = create_project_request + return await self.call_with_http_info(**kwargs) + + self.create_project = _AsyncioEndpoint( + settings={ + "response_type": (Project,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects", + "operation_id": "create_project", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["create_project_request"], + "required": ["create_project_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"create_project_request": (CreateProjectRequest,)}, + "attribute_map": {}, + "location_map": {"create_project_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__create_project, + ) + + async def __delete_project(self, project_id, **kwargs): + """Delete a project # noqa: E501 + + Delete a project and all its associated configuration. Before deleting a project, you must delete all indexes, assistants, backups, and collections associated with the project. Other project resources, such as API keys, are automatically deleted when the project is deleted. # noqa: E501 + + + Args: + project_id (str): Project ID + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. 
+ _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + + Returns: + None + """ + self._process_openapi_kwargs(kwargs) + kwargs["project_id"] = project_id + return await self.call_with_http_info(**kwargs) + + self.delete_project = _AsyncioEndpoint( + settings={ + "response_type": None, + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects/{project_id}", + "operation_id": "delete_project", + "http_method": "DELETE", + "servers": None, + }, + params_map={ + "all": ["project_id"], + "required": ["project_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"project_id": (str,)}, + "attribute_map": {"project_id": "project_id"}, + "location_map": {"project_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__delete_project, + ) + + async def __fetch_project(self, project_id, **kwargs): + """Get project details # noqa: E501 + + Get details about a project. # noqa: E501 + + + Args: + project_id (str): Project ID + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + Project + """ + self._process_openapi_kwargs(kwargs) + kwargs["project_id"] = project_id + return await self.call_with_http_info(**kwargs) + + self.fetch_project = _AsyncioEndpoint( + settings={ + "response_type": (Project,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects/{project_id}", + "operation_id": "fetch_project", + "http_method": "GET", + "servers": None, + }, + params_map={ + "all": ["project_id"], + "required": ["project_id"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"project_id": (str,)}, + "attribute_map": {"project_id": "project_id"}, + "location_map": {"project_id": "path"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__fetch_project, + ) + + async def __list_projects(self, **kwargs): + """List projects # noqa: E501 + + List all projects in an organization. # noqa: E501 + + + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + InlineResponse200 + """ + self._process_openapi_kwargs(kwargs) + return await self.call_with_http_info(**kwargs) + + self.list_projects = _AsyncioEndpoint( + settings={ + "response_type": (InlineResponse200,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects", + "operation_id": "list_projects", + "http_method": "GET", + "servers": None, + }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + "attribute_map": {}, + "location_map": {}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": []}, + api_client=api_client, + callable=__list_projects, + ) + + async def __update_project(self, project_id, update_project_request, **kwargs): + """Update a project # noqa: E501 + + Update a project's configuration details. You can update the project's name, maximum number of Pods, or enable encryption with a customer-managed encryption key (CMEK). # noqa: E501 + + + Args: + project_id (str): Project ID + update_project_request (UpdateProjectRequest): Project details to be updated. Fields that are omitted will not be updated. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + Project + """ + self._process_openapi_kwargs(kwargs) + kwargs["project_id"] = project_id + kwargs["update_project_request"] = update_project_request + return await self.call_with_http_info(**kwargs) + + self.update_project = _AsyncioEndpoint( + settings={ + "response_type": (Project,), + "auth": ["BearerAuth"], + "endpoint_path": "/admin/projects/{project_id}", + "operation_id": "update_project", + "http_method": "PATCH", + "servers": None, + }, + params_map={ + "all": ["project_id", "update_project_request"], + "required": ["project_id", "update_project_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "project_id": (str,), + "update_project_request": (UpdateProjectRequest,), + }, + "attribute_map": {"project_id": "project_id"}, + "location_map": {"project_id": "path", "update_project_request": "body"}, + "collection_format_map": {}, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + callable=__update_project, + ) diff --git a/pinecone/core/openapi/admin/apis/__init__.py b/pinecone/core/openapi/admin/apis/__init__.py new file mode 100644 index 00000000..ea3a34ba --- /dev/null +++ b/pinecone/core/openapi/admin/apis/__init__.py @@ -0,0 +1,17 @@ +# flake8: noqa + +# Import all APIs into this package. +# If you have many APIs here with many many models used in each API this may +# raise a `RecursionError`. 
+# In order to avoid this, import only the API that you directly need like:
+#
+# from .api.api_keys_api import APIKeysApi
+#
+# or import this package, but before doing it, use:
+#
+# import sys
+# sys.setrecursionlimit(n)
+
+# Import APIs into API package:
+from pinecone.core.openapi.admin.api.api_keys_api import APIKeysApi
+from pinecone.core.openapi.admin.api.projects_api import ProjectsApi
diff --git a/pinecone/core/openapi/admin/model/__init__.py b/pinecone/core/openapi/admin/model/__init__.py
new file mode 100644
index 00000000..cfe32b78
--- /dev/null
+++ b/pinecone/core/openapi/admin/model/__init__.py
@@ -0,0 +1,5 @@
+# we can not import model classes here because that would create a circular
+# reference which would not work in python2
+# do not import all models into this module because that uses a lot of memory and stack frames
+# if you need the ability to import all models from one package, import them with
+# from {{packageName}}.models import ModelA, ModelB
diff --git a/pinecone/core/openapi/admin/model/api_key.py b/pinecone/core/openapi/admin/model/api_key.py
new file mode 100644
index 00000000..6f48cdeb
--- /dev/null
+++ b/pinecone/core/openapi/admin/model/api_key.py
@@ -0,0 +1,303 @@
+"""
+Pinecone Admin API
+
+Provides an API for managing a Pinecone organization and its resources. # noqa: E501
+
+This file is @generated using OpenAPI.
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="APIKey") + + +class APIKey(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { + ("roles",): { + "PROJECTEDITOR": "ProjectEditor", + "PROJECTVIEWER": "ProjectViewer", + "CONTROLPLANEEDITOR": "ControlPlaneEditor", + "CONTROLPLANEVIEWER": "ControlPlaneViewer", + "DATAPLANEEDITOR": "DataPlaneEditor", + "DATAPLANEVIEWER": "DataPlaneViewer", + } + } + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "id": (str,), # noqa: E501 + "name": (str,), # noqa: E501 + "project_id": (str,), # noqa: E501 + "roles": ([str],), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "id": "id", # noqa: E501 + "name": "name", # noqa: E501 + "project_id": "project_id", # noqa: E501 + "roles": "roles", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], id, name, project_id, roles, *args, **kwargs) -> T: # noqa: E501 + """APIKey - a model defined in OpenAPI + + Args: + id (str): The unique ID of the API key. + name (str): The name of the API key. + project_id (str): The ID of the project containing the API key. + roles ([str]): The roles assigned to the API key. 
+ + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.id = id + self.name = name + self.project_id = project_id + self.roles = roles + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, id, name, project_id, roles, *args, **kwargs) -> None: # noqa: E501 + """APIKey - a model defined in OpenAPI + + Args: + id (str): The unique ID of the API key. + name (str): The name of the API key. + project_id (str): The ID of the project containing the API key. + roles ([str]): The roles assigned to the API key. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. 
+ False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.id = id + self.name = name + self.project_id = project_id + self.roles = roles + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/admin/model/api_key_with_secret.py b/pinecone/core/openapi/admin/model/api_key_with_secret.py new file mode 100644 index 00000000..f073ec22 --- /dev/null +++ b/pinecone/core/openapi/admin/model/api_key_with_secret.py @@ -0,0 +1,290 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.admin.model.api_key import APIKey + + globals()["APIKey"] = APIKey + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="APIKeyWithSecret") + + +class APIKeyWithSecret(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "key": (APIKey,), # noqa: E501 + "value": (str,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "key": "key", # noqa: E501 + "value": "value", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], key, value, *args, **kwargs) -> T: # noqa: E501 + """APIKeyWithSecret - a model defined in OpenAPI + + Args: + key (APIKey): + value (str): The value to use as an API key. New keys will have the format `\"pckey__\"`. The entire string should be used when authenticating. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.key = key + self.value = value + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, key, value, *args, **kwargs) -> None: # noqa: E501 + """APIKeyWithSecret - a model defined in OpenAPI + + Args: + key (APIKey): + value (str): The value to use as an API key. New keys will have the format `\"pckey__\"`. The entire string should be used when authenticating. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. 
snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.key = key + self.value = value + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/admin/model/create_api_key_request.py b/pinecone/core/openapi/admin/model/create_api_key_request.py new file mode 100644 index 00000000..57c7f60e --- /dev/null +++ b/pinecone/core/openapi/admin/model/create_api_key_request.py @@ -0,0 +1,291 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="CreateAPIKeyRequest") + + +class CreateAPIKeyRequest(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { + ("roles",): { + "PROJECTEDITOR": "ProjectEditor", + "PROJECTVIEWER": "ProjectViewer", + "CONTROLPLANEEDITOR": "ControlPlaneEditor", + "CONTROLPLANEVIEWER": "ControlPlaneViewer", + "DATAPLANEEDITOR": "DataPlaneEditor", + "DATAPLANEVIEWER": "DataPlaneViewer", + } + } + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("name",): {"max_length": 80, "min_length": 1} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "name": (str,), # noqa: E501 + "roles": ([str],), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "name": "name", # noqa: E501 + "roles": "roles", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 + """CreateAPIKeyRequest - a model defined in OpenAPI + + Args: + name (str): The name of the API key. The name must be 1-80 characters long. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + roles ([str]): The roles to create the API key with. Default is `[\"ProjectEditor\"]`. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.name = name + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, name, *args, **kwargs) -> None: # noqa: E501 + """CreateAPIKeyRequest - a model defined in OpenAPI + + Args: + name (str): The name of the API key. The name must be 1-80 characters long. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. 
+ If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + roles ([str]): The roles to create the API key with. Default is `[\"ProjectEditor\"]`. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.name = name + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/admin/model/create_project_request.py b/pinecone/core/openapi/admin/model/create_project_request.py new file mode 100644 index 00000000..5c280fe1 --- /dev/null +++ b/pinecone/core/openapi/admin/model/create_project_request.py @@ -0,0 +1,286 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="CreateProjectRequest") + + +class CreateProjectRequest(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("name",): {"max_length": 512, "min_length": 1} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "name": (str,), # noqa: E501 + "max_pods": (int,), # noqa: E501 + "force_encryption_with_cmek": (bool,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "name": "name", # noqa: E501 + "max_pods": "max_pods", # noqa: E501 + "force_encryption_with_cmek": "force_encryption_with_cmek", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 + """CreateProjectRequest - a model defined in OpenAPI + + Args: + name (str): The name of the new project. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + max_pods (int): The maximum number of Pods that can be created in the project. Default is `0` (serverless only). [optional] # noqa: E501 + force_encryption_with_cmek (bool): Whether to force encryption with a customer-managed encryption key (CMEK). Default is `false`. 
[optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.name = name + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, name, *args, **kwargs) -> None: # noqa: E501 + """CreateProjectRequest - a model defined in OpenAPI + + Args: + name (str): The name of the new project. 
+ + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + max_pods (int): The maximum number of Pods that can be created in the project. Default is `0` (serverless only). [optional] # noqa: E501 + force_encryption_with_cmek (bool): Whether to force encryption with a customer-managed encryption key (CMEK). Default is `false`. 
[optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.name = name + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/admin/model/inline_response200.py b/pinecone/core/openapi/admin/model/inline_response200.py new file mode 100644 index 00000000..ab807cf0 --- /dev/null +++ b/pinecone/core/openapi/admin/model/inline_response200.py @@ -0,0 +1,278 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. 
# noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.admin.model.project import Project + + globals()["Project"] = Project + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="InlineResponse200") + + +class InlineResponse200(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "data": ([Project],) # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "data": "data" # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """InlineResponse200 - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. 
+ If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + data ([Project]): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """InlineResponse200 - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. 
+ _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + data ([Project]): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/admin/model/inline_response2001.py b/pinecone/core/openapi/admin/model/inline_response2001.py new file mode 100644 index 00000000..f7b4c6be --- /dev/null +++ b/pinecone/core/openapi/admin/model/inline_response2001.py @@ -0,0 +1,278 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.admin.model.api_key import APIKey + + globals()["APIKey"] = APIKey + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="InlineResponse2001") + + +class InlineResponse2001(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "data": ([APIKey],) # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "data": "data" # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """InlineResponse2001 - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. 
+ If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + data ([APIKey]): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """InlineResponse2001 - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. 
+ _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + data ([APIKey]): [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/admin/model/inline_response401.py b/pinecone/core/openapi/admin/model/inline_response401.py new file mode 100644 index 00000000..f89c4f94 --- /dev/null +++ b/pinecone/core/openapi/admin/model/inline_response401.py @@ -0,0 +1,290 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +def lazy_import(): + from pinecone.core.openapi.admin.model.inline_response401_error import InlineResponse401Error + + globals()["InlineResponse401Error"] = InlineResponse401Error + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="InlineResponse401") + + +class InlineResponse401(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + lazy_import() + return { + "status": (int,), # noqa: E501 + "error": (InlineResponse401Error,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "status": "status", # noqa: E501 + "error": "error", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # noqa: E501 + """InlineResponse401 - a model defined in OpenAPI + + Args: + status (int): The HTTP status code of the error. + error (InlineResponse401Error): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. 
+ False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.status = status + self.error = error + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, status, error, *args, **kwargs) -> None: # noqa: E501 + """InlineResponse401 - a model defined in OpenAPI + + Args: + status (int): The HTTP status code of the error. + error (InlineResponse401Error): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. 
+ If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.status = status + self.error = error + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/admin/model/inline_response401_error.py b/pinecone/core/openapi/admin/model/inline_response401_error.py new file mode 100644 index 00000000..1dbd766f --- /dev/null +++ b/pinecone/core/openapi/admin/model/inline_response401_error.py @@ -0,0 +1,308 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="InlineResponse401Error") + + +class InlineResponse401Error(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { + ("code",): { + "OK": "OK", + "UNKNOWN": "UNKNOWN", + "INVALID_ARGUMENT": "INVALID_ARGUMENT", + "DEADLINE_EXCEEDED": "DEADLINE_EXCEEDED", + "QUOTA_EXCEEDED": "QUOTA_EXCEEDED", + "NOT_FOUND": "NOT_FOUND", + "ALREADY_EXISTS": "ALREADY_EXISTS", + "PERMISSION_DENIED": "PERMISSION_DENIED", + "UNAUTHENTICATED": "UNAUTHENTICATED", + "RESOURCE_EXHAUSTED": "RESOURCE_EXHAUSTED", + "FAILED_PRECONDITION": "FAILED_PRECONDITION", + "ABORTED": "ABORTED", + "OUT_OF_RANGE": "OUT_OF_RANGE", + "UNIMPLEMENTED": "UNIMPLEMENTED", + "INTERNAL": "INTERNAL", + "UNAVAILABLE": "UNAVAILABLE", + "DATA_LOSS": "DATA_LOSS", + "FORBIDDEN": "FORBIDDEN", + "UNPROCESSABLE_ENTITY": "UNPROCESSABLE_ENTITY", + } + } + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + return { + "code": (str,), # noqa: E501 + "message": (str,), # noqa: E501 + "details": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "code": "code", # noqa: E501 + "message": "message", # noqa: E501 + "details": "details", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # noqa: E501 + """InlineResponse401Error - a model defined in OpenAPI + + Args: + code (str): + message (str): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.code = code + self.message = message + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 + """InlineResponse401Error - a model defined in OpenAPI + + Args: + code (str): + message (str): + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.code = code + self.message = message + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/openapi/admin/model/project.py b/pinecone/core/openapi/admin/model/project.py new file mode 100644 index 00000000..eea8b20b --- /dev/null +++ b/pinecone/core/openapi/admin/model/project.py @@ -0,0 +1,317 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="Project") + + +class Project(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. 
+ additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("name",): {"max_length": 512, "min_length": 1} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "id": (str,), # noqa: E501 + "name": (str,), # noqa: E501 + "max_pods": (int,), # noqa: E501 + "force_encryption_with_cmek": (bool,), # noqa: E501 + "organization_id": (str,), # noqa: E501 + "created_at": (datetime,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "id": "id", # noqa: E501 + "name": "name", # noqa: E501 + "max_pods": "max_pods", # noqa: E501 + "force_encryption_with_cmek": "force_encryption_with_cmek", # noqa: E501 + "organization_id": "organization_id", # noqa: E501 + "created_at": "created_at", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data( + cls: Type[T], + id, + name, + max_pods, + force_encryption_with_cmek, + organization_id, + *args, + **kwargs, + ) -> T: # noqa: E501 + """Project - a model defined in OpenAPI + + Args: + id (str): The unique ID of the project. 
+ name (str): The name of the project. + max_pods (int): The maximum number of Pods that can be created in the project. + force_encryption_with_cmek (bool): Whether to force encryption with a customer-managed encryption key (CMEK). + organization_id (str): The unique ID of the organization that the project belongs to. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + created_at (datetime): The date and time when the project was created. 
[optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.id = id + self.name = name + self.max_pods = max_pods + self.force_encryption_with_cmek = force_encryption_with_cmek + self.organization_id = organization_id + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__( + self, id, name, max_pods, force_encryption_with_cmek, organization_id, *args, **kwargs + ) -> None: # noqa: E501 + """Project - a model defined in OpenAPI + + Args: + id (str): The unique ID of the project. + name (str): The name of the project. + max_pods (int): The maximum number of Pods that can be created in the project. + force_encryption_with_cmek (bool): Whether to force encryption with a customer-managed encryption key (CMEK). + organization_id (str): The unique ID of the organization that the project belongs to. + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. 
+ For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + created_at (datetime): The date and time when the project was created. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.id = id + self.name = name + self.max_pods = max_pods + self.force_encryption_with_cmek = force_encryption_with_cmek + self.organization_id = organization_id + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/admin/model/update_project_request.py b/pinecone/core/openapi/admin/model/update_project_request.py new file mode 100644 index 00000000..b061a9b2 --- /dev/null +++ b/pinecone/core/openapi/admin/model/update_project_request.py @@ -0,0 +1,280 @@ +""" +Pinecone Admin API + +Provides an API for managing a Pinecone organization and its resources. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="UpdateProjectRequest") + + +class UpdateProjectRequest(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. 
+ validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = { + ("name",): {"max_length": 512, "min_length": 1} + } + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + return { + "name": (str,), # noqa: E501 + "max_pods": (int,), # noqa: E501 + "force_encryption_with_cmek": (bool,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "name": "name", # noqa: E501 + "max_pods": "max_pods", # noqa: E501 + "force_encryption_with_cmek": "force_encryption_with_cmek", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """UpdateProjectRequest - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): The name of the new project. [optional] # noqa: E501 + max_pods (int): The maximum number of Pods that can be created in the project. [optional] # noqa: E501 + force_encryption_with_cmek (bool): Whether to force encryption with a customer-managed encryption key (CMEK). Once enabled, CMEK encryption cannot be disabled. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """UpdateProjectRequest - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): The name of the new project. 
[optional] # noqa: E501 + max_pods (int): The maximum number of Pods that can be created in the project. [optional] # noqa: E501 + force_encryption_with_cmek (bool): Whether to force encryption with a customer-managed encryption key (CMEK). Once enabled, CMEK encryption cannot be disabled. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/openapi/admin/models/__init__.py b/pinecone/core/openapi/admin/models/__init__.py new file mode 100644 index 00000000..fd24744c --- /dev/null +++ b/pinecone/core/openapi/admin/models/__init__.py @@ -0,0 +1,21 @@ +# flake8: noqa + +# import all models into this package +# if you have many models here with many references from one model to another this may +# raise a RecursionError +# to avoid this, import only the models that you directly need like: +# from pinecone.core.openapi.admin.model.pet import Pet +# or import this package, but before doing it, use: +# import sys +# sys.setrecursionlimit(n) + +from pinecone.core.openapi.admin.model.api_key import APIKey +from pinecone.core.openapi.admin.model.api_key_with_secret import APIKeyWithSecret +from pinecone.core.openapi.admin.model.create_api_key_request import CreateAPIKeyRequest +from pinecone.core.openapi.admin.model.create_project_request import CreateProjectRequest +from pinecone.core.openapi.admin.model.inline_response200 import InlineResponse200 +from pinecone.core.openapi.admin.model.inline_response2001 import InlineResponse2001 +from pinecone.core.openapi.admin.model.inline_response401 import InlineResponse401 +from pinecone.core.openapi.admin.model.inline_response401_error import InlineResponse401Error +from pinecone.core.openapi.admin.model.project import Project +from pinecone.core.openapi.admin.model.update_project_request import UpdateProjectRequest diff --git a/pinecone/core/openapi/db_data/api/namespace_operations_api.py b/pinecone/core/openapi/db_data/api/namespace_operations_api.py index e28e7430..0493286b 100644 --- a/pinecone/core/openapi/db_data/api/namespace_operations_api.py +++ b/pinecone/core/openapi/db_data/api/namespace_operations_api.py @@ -111,7 +111,7 @@ def __delete_namespace(self, namespace, **kwargs: ExtraOpenApiKwargsTypedDict): def __describe_namespace(self, namespace, **kwargs: ExtraOpenApiKwargsTypedDict): """Describe a namespace # noqa: E501 
- Describe a namespace within an index, showing the vector count within the namespace. # noqa: E501 + Describe a [namespace](https://docs.pinecone.io/guides/index-data/indexing-overview#namespaces) in a serverless index, including the total number of vectors in the namespace. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -178,9 +178,9 @@ def __describe_namespace(self, namespace, **kwargs: ExtraOpenApiKwargsTypedDict) ) def __list_namespaces_operation(self, **kwargs: ExtraOpenApiKwargsTypedDict): - """Get list of all namespaces # noqa: E501 + """List namespaces # noqa: E501 - Get a list of all namespaces within an index. # noqa: E501 + Get a list of all [namespaces](https://docs.pinecone.io/guides/index-data/indexing-overview#namespaces) in a serverless index. Up to 100 namespaces are returned at a time by default, in sorted order (bitwise “C” collation). If the `limit` parameter is set, up to that number of namespaces are returned instead. Whenever there are additional namespaces to return, the response also includes a `pagination_token` that you can use to get the next batch of namespaces. When the response does not include a `pagination_token`, there are no more namespaces to return. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -322,7 +322,7 @@ async def __delete_namespace(self, namespace, **kwargs): async def __describe_namespace(self, namespace, **kwargs): """Describe a namespace # noqa: E501 - Describe a namespace within an index, showing the vector count within the namespace. # noqa: E501 + Describe a [namespace](https://docs.pinecone.io/guides/index-data/indexing-overview#namespaces) in a serverless index, including the total number of vectors in the namespace. 
# noqa: E501 Args: @@ -382,9 +382,9 @@ async def __describe_namespace(self, namespace, **kwargs): ) async def __list_namespaces_operation(self, **kwargs): - """Get list of all namespaces # noqa: E501 + """List namespaces # noqa: E501 - Get a list of all namespaces within an index. # noqa: E501 + Get a list of all [namespaces](https://docs.pinecone.io/guides/index-data/indexing-overview#namespaces) in a serverless index. Up to 100 namespaces are returned at a time by default, in sorted order (bitwise “C” collation). If the `limit` parameter is set, up to that number of namespaces are returned instead. Whenever there are additional namespaces to return, the response also includes a `pagination_token` that you can use to get the next batch of namespaces. When the response does not include a `pagination_token`, there are no more namespaces to return. # noqa: E501 diff --git a/pinecone/core/openapi/db_data/api/vector_operations_api.py b/pinecone/core/openapi/db_data/api/vector_operations_api.py index 25ad63c5..7802de53 100644 --- a/pinecone/core/openapi/db_data/api/vector_operations_api.py +++ b/pinecone/core/openapi/db_data/api/vector_operations_api.py @@ -350,7 +350,7 @@ def __list_vectors(self, **kwargs: ExtraOpenApiKwargsTypedDict): def __query_vectors(self, query_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Search with a vector # noqa: E501 - Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 + Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance, examples, and limits, see [Search](https://docs.pinecone.io/guides/search/search-overview). # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -421,7 +421,7 @@ def __search_records_namespace( ): """Search with text # noqa: E501 - Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 + Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance, examples, and limits, see [Search](https://docs.pinecone.io/guides/search/search-overview). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -566,7 +566,7 @@ def __upsert_records_namespace( ): """Upsert text # noqa: E501 - Upsert text into a namespace. Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-text). # noqa: E501 + Upsert text into a namespace. 
Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance, examples, and limits, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -637,7 +637,7 @@ def __upsert_records_namespace( def __upsert_vectors(self, upsert_request, **kwargs: ExtraOpenApiKwargsTypedDict): """Upsert vectors # noqa: E501 - Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-vectors). # noqa: E501 + Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance, examples, and limits, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -983,7 +983,7 @@ async def __list_vectors(self, **kwargs): async def __query_vectors(self, query_request, **kwargs): """Search with a vector # noqa: E501 - Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 + Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance, examples, and limits, see [Search](https://docs.pinecone.io/guides/search/search-overview). 
# noqa: E501 Args: @@ -1045,7 +1045,7 @@ async def __query_vectors(self, query_request, **kwargs): async def __search_records_namespace(self, namespace, search_records_request, **kwargs): """Search with text # noqa: E501 - Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/semantic-search). # noqa: E501 + Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/guides/indexes/create-an-index#integrated-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance, examples, and limits, see [Search](https://docs.pinecone.io/guides/search/search-overview). # noqa: E501 Args: @@ -1174,7 +1174,7 @@ async def __update_vector(self, update_request, **kwargs): async def __upsert_records_namespace(self, namespace, upsert_record, **kwargs): """Upsert text # noqa: E501 - Upsert text into a namespace. Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-text). # noqa: E501 + Upsert text into a namespace. 
Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance, examples, and limits, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data). # noqa: E501 Args: @@ -1238,7 +1238,7 @@ async def __upsert_records_namespace(self, namespace, upsert_record, **kwargs): async def __upsert_vectors(self, upsert_request, **kwargs): """Upsert vectors # noqa: E501 - Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data#upsert-vectors). # noqa: E501 + Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance, examples, and limits, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data). # noqa: E501 Args: diff --git a/pinecone/core/openapi/db_data/model/query_request.py b/pinecone/core/openapi/db_data/model/query_request.py index 989ad83e..74577d1b 100644 --- a/pinecone/core/openapi/db_data/model/query_request.py +++ b/pinecone/core/openapi/db_data/model/query_request.py @@ -170,7 +170,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to query. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). You can use vector metadata to limit your search. 
See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 include_values (bool): Indicates whether vector values are included in the response. [optional] if omitted the server will use the default value of False. # noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. [optional] if omitted the server will use the default value of False. # noqa: E501 queries ([QueryVector]): DEPRECATED. Use `vector` or `id` instead. [optional] # noqa: E501 @@ -271,7 +271,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to query. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 + filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 include_values (bool): Indicates whether vector values are included in the response. [optional] if omitted the server will use the default value of False. 
# noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. [optional] if omitted the server will use the default value of False. # noqa: E501 queries ([QueryVector]): DEPRECATED. Use `vector` or `id` instead. [optional] # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/upsert_request.py b/pinecone/core/openapi/db_data/model/upsert_request.py index fccfb3c8..a00e1d61 100644 --- a/pinecone/core/openapi/db_data/model/upsert_request.py +++ b/pinecone/core/openapi/db_data/model/upsert_request.py @@ -115,7 +115,7 @@ def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E5 """UpsertRequest - a model defined in OpenAPI Args: - vectors ([Vector]): An array containing the vectors to upsert. Recommended batch limit is 100 vectors. + vectors ([Vector]): An array containing the vectors to upsert. Recommended batch limit is up to 1000 vectors. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -209,7 +209,7 @@ def __init__(self, vectors, *args, **kwargs) -> None: # noqa: E501 """UpsertRequest - a model defined in OpenAPI Args: - vectors ([Vector]): An array containing the vectors to upsert. Recommended batch limit is 100 vectors. + vectors ([Vector]): An array containing the vectors to upsert. Recommended batch limit is up to 1000 vectors. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/pinecone/core/openapi/inference/api/inference_api.py b/pinecone/core/openapi/inference/api/inference_api.py index 5c9d2efe..ad5f7d3e 100644 --- a/pinecone/core/openapi/inference/api/inference_api.py +++ b/pinecone/core/openapi/inference/api/inference_api.py @@ -46,7 +46,7 @@ def __init__(self, api_client=None) -> None: def __embed(self, **kwargs: ExtraOpenApiKwargsTypedDict): """Generate vectors # noqa: E501 - Generate vector embeddings for input data. 
This endpoint uses [Pinecone Inference](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding). # noqa: E501 + Generate vector embeddings for input data. This endpoint uses Pinecone's [hosted embedding models](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -111,9 +111,9 @@ def __embed(self, **kwargs: ExtraOpenApiKwargsTypedDict): ) def __get_model(self, model_name, **kwargs: ExtraOpenApiKwargsTypedDict): - """Get available model details. # noqa: E501 + """Describe a model # noqa: E501 - Get model details. # noqa: E501 + Get a description of a model hosted by Pinecone. You can use hosted models as an integrated part of Pinecone operations or for standalone embedding and reranking. For more details, see [Vector embedding](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding) and [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -180,9 +180,9 @@ def __get_model(self, model_name, **kwargs: ExtraOpenApiKwargsTypedDict): ) def __list_models(self, **kwargs: ExtraOpenApiKwargsTypedDict): - """Get available models. # noqa: E501 + """List available models # noqa: E501 - Get available models. # noqa: E501 + List the embedding and reranking models hosted by Pinecone. You can use hosted models as an integrated part of Pinecone operations or for standalone embedding and reranking. For more details, see [Vector embedding](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding) and [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True @@ -250,7 +250,7 @@ def __list_models(self, **kwargs: ExtraOpenApiKwargsTypedDict): def __rerank(self, **kwargs: ExtraOpenApiKwargsTypedDict): """Rerank documents # noqa: E501 - Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 + Rerank results according to their relevance to a query. For guidance and examples, see [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True @@ -329,7 +329,7 @@ def __init__(self, api_client=None) -> None: async def __embed(self, **kwargs): """Generate vectors # noqa: E501 - Generate vector embeddings for input data. This endpoint uses [Pinecone Inference](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding). # noqa: E501 + Generate vector embeddings for input data. This endpoint uses Pinecone's [hosted embedding models](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models). # noqa: E501 @@ -387,9 +387,9 @@ async def __embed(self, **kwargs): ) async def __get_model(self, model_name, **kwargs): - """Get available model details. # noqa: E501 + """Describe a model # noqa: E501 - Get model details. # noqa: E501 + Get a description of a model hosted by Pinecone. You can use hosted models as an integrated part of Pinecone operations or for standalone embedding and reranking. For more details, see [Vector embedding](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding) and [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 Args: @@ -449,9 +449,9 @@ async def __get_model(self, model_name, **kwargs): ) async def __list_models(self, **kwargs): - """Get available models. 
# noqa: E501 + """List available models # noqa: E501 - Get available models. # noqa: E501 + List the embedding and reranking models hosted by Pinecone. You can use hosted models as an integrated part of Pinecone operations or for standalone embedding and reranking. For more details, see [Vector embedding](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding) and [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 @@ -512,7 +512,7 @@ async def __list_models(self, **kwargs): async def __rerank(self, **kwargs): """Rerank documents # noqa: E501 - Rerank documents according to their relevance to a query. For guidance and examples, see [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 + Rerank results according to their relevance to a query. For guidance and examples, see [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 diff --git a/pinecone/core/openapi/oauth/__init__.py b/pinecone/core/openapi/oauth/__init__.py new file mode 100644 index 00000000..95b9e822 --- /dev/null +++ b/pinecone/core/openapi/oauth/__init__.py @@ -0,0 +1,30 @@ +# flake8: noqa + +""" +Pinecone OAuth API + +Provides an API for authenticating with Pinecone. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +__version__ = "1.0.0" + +# import ApiClient +from pinecone.openapi_support.api_client import ApiClient + +# import Configuration +from pinecone.config.openapi_configuration import Configuration + +# import exceptions +from pinecone.openapi_support.exceptions import PineconeException +from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from pinecone.openapi_support.exceptions import PineconeApiTypeError +from pinecone.openapi_support.exceptions import PineconeApiValueError +from pinecone.openapi_support.exceptions import PineconeApiKeyError +from pinecone.openapi_support.exceptions import PineconeApiException + +API_VERSION = "2025-04" diff --git a/pinecone/core/openapi/oauth/api/__init__.py b/pinecone/core/openapi/oauth/api/__init__.py new file mode 100644 index 00000000..71a285ef --- /dev/null +++ b/pinecone/core/openapi/oauth/api/__init__.py @@ -0,0 +1,3 @@ +# do not import all apis into this module because that uses a lot of memory and stack frames +# if you need the ability to import all apis from one package, import them with +# from pinecone.core.openapi.oauth.apis import OAuthApi diff --git a/pinecone/core/openapi/oauth/api/o_auth_api.py b/pinecone/core/openapi/oauth/api/o_auth_api.py new file mode 100644 index 00000000..e2d90fb7 --- /dev/null +++ b/pinecone/core/openapi/oauth/api/o_auth_api.py @@ -0,0 +1,189 @@ +""" +Pinecone OAuth API + +Provides an API for authenticating with Pinecone. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support import ApiClient, AsyncioApiClient +from pinecone.openapi_support.endpoint_utils import ( + ExtraOpenApiKwargsTypedDict, + KwargsWithOpenApiKwargDefaultsTypedDict, +) +from pinecone.openapi_support.endpoint import Endpoint as _Endpoint, ExtraOpenApiKwargsTypedDict +from pinecone.openapi_support.asyncio_endpoint import AsyncioEndpoint as _AsyncioEndpoint +from pinecone.openapi_support.model_utils import ( # noqa: F401 + date, + datetime, + file_type, + none_type, + validate_and_convert_types, +) +from pinecone.core.openapi.oauth.model.inline_response400 import InlineResponse400 +from pinecone.core.openapi.oauth.model.token_request import TokenRequest +from pinecone.core.openapi.oauth.model.token_response import TokenResponse + + +class OAuthApi: + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient() + self.api_client = api_client + + def __get_token(self, token_request, **kwargs: ExtraOpenApiKwargsTypedDict): + """Get an access token # noqa: E501 + + Obtain an access token for a service account using the OAuth2 client credentials flow. An access token is needed to authorize requests to the Pinecone Admin API. The host domain for OAuth endpoints is `login.pinecone.io`. # noqa: E501 + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + + >>> thread = api.get_token(token_request, async_req=True) + >>> result = thread.get() + + Args: + token_request (TokenRequest): A request to exchange client credentials for an access token. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. 
+ _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. + async_req (bool): execute request asynchronously + + Returns: + TokenResponse + If the method is called asynchronously, returns the request + thread. + """ + kwargs = self._process_openapi_kwargs(kwargs) + kwargs["token_request"] = token_request + return self.call_with_http_info(**kwargs) + + self.get_token = _Endpoint( + settings={ + "response_type": (TokenResponse,), + "auth": [], + "endpoint_path": "/oauth/token", + "operation_id": "get_token", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["token_request"], + "required": ["token_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"token_request": (TokenRequest,)}, + "attribute_map": {}, + "location_map": {"token_request": "body"}, + "collection_format_map": {}, + }, + headers_map={ + "accept": ["application/json"], + "content_type": ["application/json", "application/x-www-form-urlencoded"], + }, + api_client=api_client, + callable=__get_token, + ) + + +class AsyncioOAuthApi: + """NOTE: This class is @generated using OpenAPI + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = AsyncioApiClient() + self.api_client = api_client + + async def __get_token(self, token_request, **kwargs): + """Get an access token # noqa: E501 + + Obtain an access token for a service account using the OAuth2 client credentials flow. An access token is needed to authorize requests to the Pinecone Admin API. The host domain for OAuth endpoints is `login.pinecone.io`. # noqa: E501 + + + Args: + token_request (TokenRequest): A request to exchange client credentials for an access token. + + Keyword Args: + _return_http_data_only (bool): response data without head status + code and headers. Default is True. + _preload_content (bool): if False, the urllib3.HTTPResponse object + will be returned without reading/decoding response data. + Default is True. + _request_timeout (int/float/tuple): timeout setting for this request. If + one number provided, it will be total request timeout. It can also + be a pair (tuple) of (connection, read) timeouts. + Default is None. + _check_input_type (bool): specifies if type checking + should be done one the data sent to the server. + Default is True. + _check_return_type (bool): specifies if type checking + should be done one the data received from the server. + Default is True. 
+ + Returns: + TokenResponse + """ + self._process_openapi_kwargs(kwargs) + kwargs["token_request"] = token_request + return await self.call_with_http_info(**kwargs) + + self.get_token = _AsyncioEndpoint( + settings={ + "response_type": (TokenResponse,), + "auth": [], + "endpoint_path": "/oauth/token", + "operation_id": "get_token", + "http_method": "POST", + "servers": None, + }, + params_map={ + "all": ["token_request"], + "required": ["token_request"], + "nullable": [], + "enum": [], + "validation": [], + }, + root_map={ + "validations": {}, + "allowed_values": {}, + "openapi_types": {"token_request": (TokenRequest,)}, + "attribute_map": {}, + "location_map": {"token_request": "body"}, + "collection_format_map": {}, + }, + headers_map={ + "accept": ["application/json"], + "content_type": ["application/json", "application/x-www-form-urlencoded"], + }, + api_client=api_client, + callable=__get_token, + ) diff --git a/pinecone/core/openapi/oauth/apis/__init__.py b/pinecone/core/openapi/oauth/apis/__init__.py new file mode 100644 index 00000000..9da16952 --- /dev/null +++ b/pinecone/core/openapi/oauth/apis/__init__.py @@ -0,0 +1,16 @@ +# flake8: noqa + +# Import all APIs into this package. +# If you have many APIs here with many many models used in each API this may +# raise a `RecursionError`. 
+# In order to avoid this, import only the API that you directly need like: +# +# from .api.o_auth_api import OAuthApi +# +# or import this package, but before doing it, use: +# +# import sys +# sys.setrecursionlimit(n) + +# Import APIs into API package: +from pinecone.core.openapi.oauth.api.o_auth_api import OAuthApi diff --git a/pinecone/core/openapi/oauth/model/__init__.py b/pinecone/core/openapi/oauth/model/__init__.py new file mode 100644 index 00000000..cfe32b78 --- /dev/null +++ b/pinecone/core/openapi/oauth/model/__init__.py @@ -0,0 +1,5 @@ +# we can not import model classes here because that would create a circular +# reference which would not work in python2 +# do not import all models into this module because that uses a lot of memory and stack frames +# if you need the ability to import all models from one package, import them with +# from {{packageName}.models import ModelA, ModelB diff --git a/pinecone/core/openapi/oauth/model/inline_response400.py b/pinecone/core/openapi/oauth/model/inline_response400.py new file mode 100644 index 00000000..44d0a076 --- /dev/null +++ b/pinecone/core/openapi/oauth/model/inline_response400.py @@ -0,0 +1,274 @@ +""" +Pinecone OAuth API + +Provides an API for authenticating with Pinecone. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="InlineResponse400") + + +class InlineResponse400(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "error": (str,), # noqa: E501 + "error_description": (str,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "error": "error", # noqa: E501 + "error_description": "error_description", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 + """InlineResponse400 - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. 
snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + error (str): A code identifying the error that occurred. [optional] # noqa: E501 + error_description (str): A human-readable description of the error. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs) -> None: # noqa: E501 + """InlineResponse400 - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. 
+ _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + error (str): A code identifying the error that occurred. [optional] # noqa: E501 + error_description (str): A human-readable description of the error. [optional] # noqa: E501 + """ + + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/oauth/model/token_request.py b/pinecone/core/openapi/oauth/model/token_request.py new file mode 100644 index 00000000..8695acbd --- /dev/null +++ b/pinecone/core/openapi/oauth/model/token_request.py @@ -0,0 +1,301 @@ +""" +Pinecone OAuth API + +Provides an API for authenticating with Pinecone. # noqa: E501 + +This file is @generated using OpenAPI. 
+ +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="TokenRequest") + + +class TokenRequest(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. 
+ """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = { + ("grant_type",): {"CLIENT_CREDENTIALS": "client_credentials"}, + ("audience",): {"HTTPS://API.PINECONE.IO/": "https://api.pinecone.io/"}, + } + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "client_id": (str,), # noqa: E501 + "client_secret": (str,), # noqa: E501 + "grant_type": (str,), # noqa: E501 + "audience": (str,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "client_id": "client_id", # noqa: E501 + "client_secret": "client_secret", # noqa: E501 + "grant_type": "grant_type", # noqa: E501 + "audience": "audience", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], client_id, client_secret, *args, **kwargs) -> T: # noqa: E501 + """TokenRequest - a model defined in OpenAPI + + Args: + client_id (str): The service account's client ID. + client_secret (str): The service account's client secret. + + Keyword Args: + grant_type (str): The type of grant to use. defaults to "client_credentials", must be one of ["client_credentials", ] # noqa: E501 + audience (str): The audience for the token. 
defaults to "https://api.pinecone.io/", must be one of ["https://api.pinecone.io/", ] # noqa: E501 + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + grant_type = kwargs.get("grant_type", "client_credentials") + audience = kwargs.get("audience", "https://api.pinecone.io/") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.client_id = client_id + self.client_secret = client_secret + self.grant_type = grant_type + self.audience = audience + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, client_id, client_secret, *args, **kwargs) -> None: # noqa: E501 + """TokenRequest - a model defined in OpenAPI + + Args: + client_id (str): The service account's client ID. + client_secret (str): The service account's client secret. + + Keyword Args: + grant_type (str): The type of grant to use. defaults to "client_credentials", must be one of ["client_credentials", ] # noqa: E501 + audience (str): The audience for the token. defaults to "https://api.pinecone.io/", must be one of ["https://api.pinecone.io/", ] # noqa: E501 + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. 
+ For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + grant_type = kwargs.get("grant_type", "client_credentials") + audience = kwargs.get("audience", "https://api.pinecone.io/") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.client_id = client_id + self.client_secret = client_secret + self.grant_type = grant_type + self.audience = audience + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/openapi/oauth/model/token_response.py b/pinecone/core/openapi/oauth/model/token_response.py new file mode 100644 index 00000000..d6039c0f --- /dev/null +++ b/pinecone/core/openapi/oauth/model/token_response.py @@ -0,0 +1,290 @@ +""" +Pinecone OAuth API + +Provides an API for authenticating with Pinecone. # noqa: E501 + +This file is @generated using OpenAPI. + +The version of the OpenAPI document: 2025-04 +Contact: support@pinecone.io +""" + +from pinecone.openapi_support.model_utils import ( # noqa: F401 + PineconeApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + OpenApiModel, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from pinecone.openapi_support.exceptions import PineconeApiAttributeError + + +from typing import Dict, Literal, Tuple, Set, Any, Type, TypeVar +from pinecone.openapi_support import PropertyValidationTypedDict, cached_class_property + +T = TypeVar("T", bound="TokenResponse") + + +class TokenResponse(ModelNormal): + """NOTE: This class is @generated using OpenAPI. + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. 
+ validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + _data_store: Dict[str, Any] + _check_type: bool + + allowed_values: Dict[Tuple[str, ...], Dict[str, Any]] = {("token_type",): {"BEARER": "Bearer"}} + + validations: Dict[Tuple[str, ...], PropertyValidationTypedDict] = {} + + @cached_class_property + def additional_properties_type(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + return (bool, dict, float, int, list, str, none_type) # noqa: E501 + + _nullable = False + + @cached_class_property + def openapi_types(cls): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. + """ + return { + "access_token": (str,), # noqa: E501 + "token_type": (str,), # noqa: E501 + "expires_in": (int,), # noqa: E501 + } + + @cached_class_property + def discriminator(cls): + return None + + attribute_map: Dict[str, str] = { + "access_token": "access_token", # noqa: E501 + "token_type": "token_type", # noqa: E501 + "expires_in": "expires_in", # noqa: E501 + } + + read_only_vars: Set[str] = set([]) + + _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls: Type[T], access_token, expires_in, *args, **kwargs) -> T: # noqa: E501 + """TokenResponse - a model defined in OpenAPI + + Args: + access_token (str): The access token. 
+ expires_in (int): The number of seconds until the token expires. + + Keyword Args: + token_type (str): The type of token. defaults to "Bearer", must be one of ["Bearer", ] # noqa: E501 + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + token_type = kwargs.get("token_type", "Bearer") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) + _enforce_validations = kwargs.pop("_enforce_validations", False) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.access_token = access_token + self.token_type = token_type + self.expires_in = expires_in + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. 
+ continue + setattr(self, var_name, var_value) + return self + + required_properties = set( + [ + "_enforce_allowed_values", + "_enforce_validations", + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) + + @convert_js_args_to_python_args + def __init__(self, access_token, expires_in, *args, **kwargs) -> None: # noqa: E501 + """TokenResponse - a model defined in OpenAPI + + Args: + access_token (str): The access token. + expires_in (int): The number of seconds until the token expires. + + Keyword Args: + token_type (str): The type of token. defaults to "Bearer", must be one of ["Bearer", ] # noqa: E501 + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + """ + + token_type = kwargs.get("token_type", "Bearer") + _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) + _enforce_validations = kwargs.pop("_enforce_validations", True) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) + + if args: + raise PineconeApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % (args, self.__class__.__name__), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._enforce_allowed_values = _enforce_allowed_values + self._enforce_validations = _enforce_validations + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + self.access_token = access_token + self.token_type = token_type + self.expires_in = expires_in + for var_name, var_value in kwargs.items(): + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/openapi/oauth/models/__init__.py b/pinecone/core/openapi/oauth/models/__init__.py new file mode 100644 index 00000000..00cf81bf --- /dev/null +++ b/pinecone/core/openapi/oauth/models/__init__.py @@ -0,0 +1,14 @@ +# flake8: noqa + +# import all models into this package +# if you have many models here with many references from one model to another this may +# raise a RecursionError +# to avoid this, import only the models that you directly need like: +# from from pinecone.core.openapi.oauth.model.pet import Pet +# or import this package, but before doing it, use: +# import sys +# sys.setrecursionlimit(n) + +from pinecone.core.openapi.oauth.model.inline_response400 import InlineResponse400 +from pinecone.core.openapi.oauth.model.token_request import TokenRequest +from pinecone.core.openapi.oauth.model.token_response import TokenResponse diff --git a/poetry.lock b/poetry.lock index 34acba57..eba19d41 100644 --- a/poetry.lock +++ b/poetry.lock @@ -137,6 +137,28 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "alabaster" +version = "0.7.16" +description = "A light, configurable Sphinx theme" +optional = false +python-versions = ">=3.9" +files = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] + +[[package]] +name = "alabaster" +version = "1.0.0" +description = "A light, configurable Sphinx theme" +optional = false +python-versions = ">=3.10" +files = [ + {file = "alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b"}, + {file = "alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e"}, +] + [[package]] name = "async-timeout" version = "5.0.1" @@ -167,6 +189,20 @@ docs = ["cogapp", "furo", 
"myst-parser", "sphinx", "sphinx-notfound-page", "sphi tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +[[package]] +name = "babel" +version = "2.17.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, +] + +[package.extras] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] + [[package]] name = "beautifulsoup4" version = "4.13.3" @@ -396,6 +432,17 @@ files = [ {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] +[[package]] +name = "docutils" +version = "0.21.2" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.9" +files = [ + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, +] + [[package]] name = "exceptiongroup" version = "1.3.0" @@ -653,6 +700,40 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = 
"imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +files = [ + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -664,6 +745,23 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + [[package]] name = "lz4" version = "4.3.2" @@ -713,6 +811,130 @@ docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] flake8 = ["flake8"] tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", 
hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.2" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = 
"sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, + {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "multidict" version = "6.1.0" @@ -874,6 +1096,58 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "myst-parser" +version = "3.0.1" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," +optional = false +python-versions = ">=3.8" +files = [ + {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, + {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, +] + +[package.dependencies] +docutils = ">=0.18,<0.22" +jinja2 = "*" +markdown-it-py = ">=3.0,<4.0" +mdit-py-plugins = ">=0.4,<1.0" +pyyaml = "*" +sphinx = ">=6,<8" + +[package.extras] +code-style = ["pre-commit (>=3.0,<4.0)"] +linkify = ["linkify-it-py (>=2.0,<3.0)"] +rtd = ["ipython", "sphinx (>=7)", "sphinx-autodoc2 (>=0.5.0,<0.6.0)", "sphinx-book-theme (>=1.1,<2.0)", "sphinx-copybutton", "sphinx-design", "sphinx-pyscript", "sphinx-tippy (>=0.4.3)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.9.0,<0.10.0)", 
"sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "defusedxml", "pytest (>=8,<9)", "pytest-cov", "pytest-param-files (>=0.6.0,<0.7.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=8,<9)", "pytest-param-files (>=0.6.0,<0.7.0)"] + +[[package]] +name = "myst-parser" +version = "4.0.1" +description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," +optional = false +python-versions = ">=3.10" +files = [ + {file = "myst_parser-4.0.1-py3-none-any.whl", hash = "sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d"}, + {file = "myst_parser-4.0.1.tar.gz", hash = "sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4"}, +] + +[package.dependencies] +docutils = ">=0.19,<0.22" +jinja2 = "*" +markdown-it-py = ">=3.0,<4.0" +mdit-py-plugins = ">=0.4.1,<1.0" +pyyaml = "*" +sphinx = ">=7,<9" + +[package.extras] +code-style = ["pre-commit (>=4.0,<5.0)"] +linkify = ["linkify-it-py (>=2.0,<3.0)"] +rtd = ["ipython", "sphinx (>=7)", "sphinx-autodoc2 (>=0.5.0,<0.6.0)", "sphinx-book-theme (>=1.1,<2.0)", "sphinx-copybutton", "sphinx-design", "sphinx-pyscript", "sphinx-tippy (>=0.4.3)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.9.0,<0.10.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "defusedxml", "pygments (<2.19)", "pytest (>=8,<9)", "pytest-cov", "pytest-param-files (>=0.6.0,<0.7.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=8,<9)", "pytest-param-files (>=0.6.0,<0.7.0)"] + [[package]] name = "nodeenv" version = "1.9.1" @@ -1293,6 +1567,20 @@ files = [ {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, ] +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pytest" version = "8.2.0" @@ -1558,6 +1846,21 @@ urllib3 = ">=1.25.10,<3.0" [package.extras] tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] +[[package]] +name = "roman-numerals-py" +version = "3.1.0" +description = "Manipulate well-formed Roman numerals" +optional = false +python-versions = ">=3.9" +files = [ + {file = "roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c"}, + {file = "roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d"}, +] + +[package.extras] +lint = ["mypy (==1.15.0)", "pyright (==1.1.394)", "ruff (==0.9.7)"] +test = ["pytest (>=8)"] + [[package]] name = "ruff" version = "0.9.3" @@ -1596,6 +1899,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "snowballstemmer" +version = "3.0.1" +description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" +files = [ + {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, + {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, +] + [[package]] name = "soupsieve" version = "2.6" @@ -1607,6 +1921,171 @@ files = [ {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] +[[package]] +name = "sphinx" +version = "7.4.7" +description = "Python documentation generator" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, +] + +[package.dependencies] +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +snowballstemmer = ">=2.2" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] + +[[package]] +name 
= "sphinx" +version = "8.2.3" +description = "Python documentation generator" +optional = false +python-versions = ">=3.11" +files = [ + {file = "sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3"}, + {file = "sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348"}, +] + +[package.dependencies] +alabaster = ">=0.7.14" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +roman-numerals-py = ">=1.0.0" +snowballstemmer = ">=2.2" +sphinxcontrib-applehelp = ">=1.0.7" +sphinxcontrib-devhelp = ">=1.0.6" +sphinxcontrib-htmlhelp = ">=2.0.6" +sphinxcontrib-jsmath = ">=1.0.1" +sphinxcontrib-qthelp = ">=1.0.6" +sphinxcontrib-serializinghtml = ">=1.1.9" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["betterproto (==2.0.0b6)", "mypy (==1.15.0)", "pypi-attestations (==0.0.21)", "pyright (==1.1.395)", "pytest (>=8.0)", "ruff (==0.9.9)", "sphinx-lint (>=0.9)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.19.0.20250219)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20241128)", "types-requests (==2.32.0.20241016)", "types-urllib3 (==1.26.25.14)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "pytest-xdist[psutil] (>=3.4)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = 
"sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] 
+name = "sphinxcontrib-qthelp" +version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + [[package]] name = "tomli" version = "2.2.1" @@ -1897,6 +2376,25 @@ idna = ">=2.0" multidict = ">=4.0" propcache = ">=0.2.0" +[[package]] +name = "zipp" +version = "3.23.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", 
"jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [extras] asyncio = ["aiohttp", "aiohttp-retry"] grpc = ["googleapis-common-protos", "grpcio", "grpcio", "grpcio", "lz4", "protobuf", "protoc-gen-openapiv2"] @@ -1904,4 +2402,4 @@ grpc = ["googleapis-common-protos", "grpcio", "grpcio", "grpcio", "lz4", "protob [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "bd1c6f98884330cb57f648fa219387cce99f35647112e51f83a42b7d9e127f45" +content-hash = "181b0da9d7c63153cbf1502725f82e6817a56a8546e1bfb52453f1719e72d831" diff --git a/pyproject.toml b/pyproject.toml index b19d3dd3..42e718e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -101,6 +101,15 @@ beautifulsoup4 = "^4.13.3" vprof = "^0.38" tuna = "^0.5.11" python-dotenv = "^1.1.0" +sphinx = [ + { version = "^7.4.7", python = ">=3.9,<3.11" }, + { version = "^8.2.3", python = ">=3.11" } +] +myst-parser = [ + { version = "^3.0.1", python = ">=3.9,<3.10" }, + { version = "^4.0.1", python = ">=3.10" } +] + [tool.poetry.extras] grpc = ["grpcio", "googleapis-common-protos", "lz4", "protobuf", "protoc-gen-openapiv2"] diff --git a/tests/integration/admin/__init__.py b/tests/integration/admin/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/admin/conftest.py b/tests/integration/admin/conftest.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/admin/test_api_key.py b/tests/integration/admin/test_api_key.py new file mode 100644 index 00000000..6d1512ef --- /dev/null +++ b/tests/integration/admin/test_api_key.py @@ -0,0 +1,127 @@ +import logging + +from pinecone import Admin + +logger = logging.getLogger(__name__) + + +class TestAdminApiKey: + def test_create_api_key(self): + admin = Admin() + project_name = 
"test-project-for-api-key" + if not admin.project.exists(name=project_name): + project = admin.project.create(name=project_name) + else: + project = admin.project.get(name=project_name) + + try: + # Create an API key + key_response = admin.api_key.create(project_id=project.id, name="test-api-key1") + logger.info(f"API key created: {key_response.key.id}") + + assert key_response.key.created_at is not None + assert key_response.key.id is not None + assert isinstance(key_response.key.id, str) + assert key_response.key.name == "test-api-key1" + assert key_response.key.project_id == project.id + assert key_response.key.roles[0] == "ProjectEditor" + assert key_response.value is not None + assert isinstance(key_response.value, str) + + # Create a second API key with non-default role + key_response2 = admin.api_key.create( + project_id=project.id, name="test-api-key2", roles=["ProjectViewer"] + ) + logger.info(f"API key created: {key_response2.key.id}") + + assert key_response2.key.created_at is not None + assert key_response2.key.id is not None + assert isinstance(key_response2.key.id, str) + assert key_response2.key.name == "test-api-key2" + assert key_response2.key.project_id == project.id + assert key_response2.key.roles[0] == "ProjectViewer" + assert key_response2.value is not None + assert isinstance(key_response2.value, str) + + # Verify dictionary-style access to key attributes + assert key_response.key["created_at"] is not None + assert key_response.key["id"] is not None + assert isinstance(key_response.key["id"], str) + assert key_response.key["name"] == "test-api-key1" + assert key_response.key["project_id"] == project.id + + # Verify get-style access to key attributes + assert key_response.key.get("created_at") is not None + assert key_response.key.get("id") is not None + assert isinstance(key_response.key.get("id"), str) + assert key_response.key.get("name") == "test-api-key1" + assert key_response.key.get("project_id") == project.id + + # Get a key by id + 
key_response_by_id = admin.api_key.fetch(api_key_id=key_response.key.id) + assert key_response_by_id.id == key_response.key.id + assert key_response_by_id.name == key_response.key.name + assert key_response_by_id.project_id == key_response.key.project_id + assert key_response_by_id.roles == key_response.key.roles + + # List API keys + key_list = admin.api_key.list(project_id=project.id).data + assert isinstance(key_list, list) + assert len(key_list) == 2 + ids = [key.id for key in key_list] + assert key_response.key.id in ids + assert key_response2.key.id in ids + + # Delete the first API key + admin.api_key.delete(api_key_id=key_response.key.id) + logger.info(f"API key deleted: {key_response.key.id}") + + # Verify key is deleted + key_list = admin.api_key.list(project_id=project.id).data + logger.info(f"API keys: {key_list}") + assert isinstance(key_list, list) + assert len(key_list) == 1 + key_list_ids = [key.id for key in key_list] + assert key_response2.key.id in key_list_ids + assert key_response.key.id not in key_list_ids + + # Delete the second API key + admin.api_key.delete(api_key_id=key_response2.key.id) + logger.info(f"API key deleted: {key_response2.key.id}") + + # Verify all keys are deleted + key_list = admin.api_key.list(project_id=project.id).data + logger.info(f"API keys: {key_list}") + assert len(key_list) == 0 + finally: + # Clean up + admin.project.delete(project_id=project.id) + logger.info(f"Project deleted: {project.id}") + + def test_fetch_aliases(self): + admin = Admin() + project_name = "test-project-for-api-key" + if not admin.project.exists(name=project_name): + project = admin.project.create(name=project_name) + else: + project = admin.project.get(name=project_name) + + try: + # Create an API key + key_response = admin.api_key.create(project_id=project.id, name="test-api-key1") + + # Fetch the API key using the aliases + key_response_by_id = admin.api_key.fetch(api_key_id=key_response.key.id) + logger.info(f"API key by id: 
{key_response_by_id}") + assert key_response_by_id.id == key_response.key.id + + get_key_response = admin.api_key.get(api_key_id=key_response.key.id) + logger.info(f"API key by name: {get_key_response}") + assert get_key_response.id == key_response.key.id + + described_key_response = admin.api_key.describe(api_key_id=key_response.key.id) + assert described_key_response.id == key_response.key.id + + finally: + admin.project.delete(project_id=project.id) + logger.info(f"Project deleted: {project.id}") diff --git a/tests/integration/admin/test_projects.py b/tests/integration/admin/test_projects.py new file mode 100644 index 00000000..d1293240 --- /dev/null +++ b/tests/integration/admin/test_projects.py @@ -0,0 +1,131 @@ +import pytest +import logging +from pinecone import Admin, Pinecone, NotFoundException +from datetime import datetime + +logger = logging.getLogger(__name__) + + +class TestAdminProjects: + def test_create_project(self): + admin = Admin() + project = admin.project.create(name="test-project") + logger.info(f"Project created: {project}") + + try: + assert project.name == "test-project" + assert project.max_pods == 0 + assert project.force_encryption_with_cmek is False + assert project.organization_id is not None + assert isinstance(project.organization_id, str) + assert project.created_at is not None + assert isinstance(project.created_at, datetime) + + # Test dictionary-style access to project attributes + assert project["name"] == "test-project" + assert project["max_pods"] == 0 + assert project["force_encryption_with_cmek"] is False + assert project["organization_id"] is not None + assert isinstance(project["organization_id"], str) + assert project["created_at"] is not None + + # Test get-style access to project attributes + assert project.get("name") == "test-project" + assert project.get("max_pods") == 0 + assert project.get("force_encryption_with_cmek") is False + assert project.get("organization_id") is not None + assert 
isinstance(project.get("organization_id"), str) + assert project.get("created_at") is not None + + # Test projects can be listed. Combining this with the create + # test means we can be assured there is at least one project + project_list = admin.project.list().data + logger.info(f"Projects: {project_list}") + assert isinstance(project_list, list) + assert len(project_list) > 0 + + assert project_list[0].id is not None + assert project_list[0].name is not None + assert project_list[0].max_pods is not None + assert project_list[0].force_encryption_with_cmek is not None + assert project_list[0].organization_id is not None + assert project_list[0].created_at is not None + + # Test that I can fetch the project I just created by id + project_by_id = admin.project.get(project_id=project.id) + assert project_by_id.id == project.id + assert project_by_id.name == project.name + + # Test that I can fetch the project using aliased methods + project_by_id_alt = admin.project.describe(project_id=project.id) + assert project_by_id_alt.id == project.id + assert project_by_id_alt.name == project.name + + project_by_name_alt2 = admin.project.fetch(project_id=project.id) + assert project_by_name_alt2.id == project.id + assert project_by_name_alt2.name == project.name + + # Test that I can fetch the project I just created by name + project_by_name = admin.project.get(name=project.name) + assert project_by_name.id == project.id + assert project_by_name.name == project.name + + # Test that I can update the project + updated = admin.project.update( + project_id=project.id, + name="test-project-updated", + max_pods=1, + force_encryption_with_cmek=True, + ) + assert updated.id == project.id + assert updated.name == "test-project-updated" + assert updated.max_pods == 1 + assert updated.force_encryption_with_cmek is True + finally: + # Clean up + admin.project.delete(project_id=project.id) + logger.info(f"Project deleted: {project.id}") + + # Test that the project is deleted + with 
pytest.raises(NotFoundException): + admin.project.get(project_id=project.id) + + def test_delete_project_containing_indexes(self): + admin = Admin() + project = admin.project.create(name="test-project-with-stuff") + logger.info(f"Project created: {project}") + + try: + # Create an api key + api_key = admin.api_key.create(project_id=project.id, name="test-api-key") + logger.info(f"API key created: {api_key.key.id}") + + pc = Pinecone(api_key=api_key.value) + created_index = pc.db.index.create( + name="test-index", + dimension=100, + metric="cosine", + spec={"serverless": {"cloud": "aws", "region": "us-east-1"}}, + deletion_protection="enabled", # extra hard to delete + ) + logger.info(f"Index created: {created_index.name}") + + # Delete the project + with pytest.raises(Exception) as e: + admin.project.delete(project_id=project.id, delete_all_indexes=True) + assert "Indexes with deletion protection enabled cannot be deleted" in str(e) + + pc.db.index.configure(name=created_index.name, deletion_protection="disabled") + + admin.project.delete(project_id=project.id, delete_all_indexes=True) + + logger.info(f"Project deleted: {project.id}") + finally: + # Clean up + if admin.project.exists(project_id=project.id): + admin.project.delete(project_id=project.id, delete_all_indexes=True) + logger.info(f"Project deleted: {project.id}") + + # Test that the project is deleted + with pytest.raises(NotFoundException): + admin.project.get(project_id=project.id) From 40b6d5a455ea1e195532dcf411f8bd0dc48fc6bd Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 13 Jun 2025 09:57:36 -0400 Subject: [PATCH 2/7] Update CI --- .github/actions/build-docs/action.yml | 21 --------------------- .github/actions/project-create/action.yml | 2 +- .github/actions/project-delete/action.yml | 2 +- 3 files changed, 2 insertions(+), 23 deletions(-) diff --git a/.github/actions/build-docs/action.yml b/.github/actions/build-docs/action.yml index c1b04351..1b7c8368 100644 --- 
a/.github/actions/build-docs/action.yml +++ b/.github/actions/build-docs/action.yml @@ -16,28 +16,7 @@ runs: include_asyncio: 'true' python_version: ${{ inputs.python-version }} - - name: Pretend this project requires Python 3.11 - shell: bash - run: | - # Poetry won't let me install sphinx as a dev dependency in this project - # because of the wide range of versions our library supports. So during this - # action, we'll pretend this project requires Python 3.11 or greater. - sed -i 's/python = "^3.9"/python = "^3.11"/' pyproject.toml - poetry lock - poetry install -E grpc -E asyncio - - - name: Install sphinx - shell: bash - run: | - poetry add sphinx myst-parser --group dev - - name: Build html documentation shell: bash run: | poetry run sphinx-build -b html docs docsbuild - - - name: Discard changes to pyproject.toml and poetry.lock - shell: bash - run: | - git checkout pyproject.toml - git checkout poetry.lock diff --git a/.github/actions/project-create/action.yml b/.github/actions/project-create/action.yml index c22297d3..375abca7 100644 --- a/.github/actions/project-create/action.yml +++ b/.github/actions/project-create/action.yml @@ -50,7 +50,7 @@ runs: - name: Create project id: create-project shell: bash - run: python3 ./.github/actions/project-create/script.py + run: poetry run python3 ./.github/actions/project-create/script.py env: API_VERSION: ${{ inputs.api_version }} PINECONE_SERVICE_ACCOUNT_CLIENT_ID: ${{ inputs.PINECONE_SERVICE_ACCOUNT_CLIENT_ID }} diff --git a/.github/actions/project-delete/action.yml b/.github/actions/project-delete/action.yml index 6f6bb6d6..5bf5cecc 100644 --- a/.github/actions/project-delete/action.yml +++ b/.github/actions/project-delete/action.yml @@ -45,7 +45,7 @@ runs: - name: Delete project id: delete-project shell: bash - run: python3 ./.github/actions/project-delete/delete-project.py + run: poetry run python3 ./.github/actions/project-delete/delete-project.py env: API_VERSION: ${{ inputs.api_version }} 
PINECONE_SERVICE_ACCOUNT_CLIENT_ID: ${{ inputs.PINECONE_SERVICE_ACCOUNT_CLIENT_ID }} From fac24ba2806cd23a30281f583a77a95bb18d31e5 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 13 Jun 2025 10:07:14 -0400 Subject: [PATCH 3/7] Fix type issue --- pinecone/admin/admin.py | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/pinecone/admin/admin.py b/pinecone/admin/admin.py index 5276f018..9b63765f 100644 --- a/pinecone/admin/admin.py +++ b/pinecone/admin/admin.py @@ -63,20 +63,24 @@ def __init__( if client_id is not None: self._client_id = client_id - elif os.getenv("PINECONE_CLIENT_ID") is not None: - self._client_id = os.getenv("PINECONE_CLIENT_ID") else: - raise ValueError( - "client_id is not set. Pass client_id to the Admin constructor or set the PINECONE_CLIENT_ID environment variable." - ) + env_client_id = os.environ.get("PINECONE_CLIENT_ID", None) + if env_client_id is None: + raise ValueError( + "client_id is not set. Pass client_id to the Admin constructor or set the PINECONE_CLIENT_ID environment variable." + ) + self._client_id = env_client_id + if client_secret is not None: self._client_secret = client_secret - elif os.getenv("PINECONE_CLIENT_SECRET") is not None: - self._client_secret = os.getenv("PINECONE_CLIENT_SECRET") else: - raise ValueError( - "client_secret is not set. Pass client_secret to the Admin constructor or set the PINECONE_CLIENT_SECRET environment variable." - ) + env_client_secret = os.environ.get("PINECONE_CLIENT_SECRET", None) + if env_client_secret is None: + raise ValueError( + "client_secret is not set. Pass client_secret to the Admin constructor or set the PINECONE_CLIENT_SECRET environment variable." 
+ ) + self._client_secret = env_client_secret + if additional_headers is None: additional_headers = {} From cd3c53e9973d95c71c075ec8a89a8fa723167fe1 Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 13 Jun 2025 10:12:09 -0400 Subject: [PATCH 4/7] Iterate on CI --- .github/actions/project-create/script.py | 2 +- pinecone/admin/admin.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/actions/project-create/script.py b/.github/actions/project-create/script.py index 0a30587b..dade9e01 100644 --- a/.github/actions/project-create/script.py +++ b/.github/actions/project-create/script.py @@ -40,7 +40,7 @@ def main(): project_name = generate_project_name() max_pods = int(os.getenv("MAX_PODS", 1)) project = admin_api.project.create(name=project_name, max_pods=max_pods) - project_api_key = admin_api.api_key.create(project_id=project.id).value + project_api_key = admin_api.api_key.create(project_id=project.id, name="ci-key").value mask(project_api_key) output_file = os.environ.get("GITHUB_OUTPUT", None) diff --git a/pinecone/admin/admin.py b/pinecone/admin/admin.py index 9b63765f..b8827d0e 100644 --- a/pinecone/admin/admin.py +++ b/pinecone/admin/admin.py @@ -1,5 +1,6 @@ from pinecone.config import OpenApiConfiguration from pinecone.openapi_support import ApiClient +from pinecone.core.openapi.oauth import API_VERSION from pinecone.core.openapi.oauth.apis import OAuthApi from pinecone.core.openapi.oauth.models import TokenRequest from typing import Optional, Dict @@ -87,7 +88,7 @@ def __init__( _oauth_api_config = OpenApiConfiguration(host="https://login.pinecone.io") _oauth_api_client = ApiClient(configuration=_oauth_api_config) - _oauth_api_client.set_default_header("X-Pinecone-Api-Version", "2025-04") + _oauth_api_client.set_default_header("X-Pinecone-Api-Version", API_VERSION) for key, value in additional_headers.items(): _oauth_api_client.set_default_header(key, value) _oauth_api_client.user_agent = get_user_agent(Config()) @@ -108,7 
+109,7 @@ def __init__( _child_api_config.api_key = {"BearerAuth": self._token} self._child_api_client = ApiClient(configuration=_child_api_config) - self._child_api_client.set_default_header("X-Pinecone-Api-Version", "2025-04") + self._child_api_client.set_default_header("X-Pinecone-Api-Version", API_VERSION) for key, value in additional_headers.items(): self._child_api_client.set_default_header(key, value) self._child_api_client.user_agent = get_user_agent(Config()) From 2aea66e3bbd5aca0b8494f1f6a430ffe4fa1a28a Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 13 Jun 2025 12:14:45 -0400 Subject: [PATCH 5/7] Refactor eraser code --- pinecone/admin/eraser/__init__.py | 0 pinecone/admin/eraser/project_eraser.py | 248 +++++++++++++++++ pinecone/admin/eraser/resources/__init__.py | 6 + .../eraser/resources/_deleteable_resource.py | 28 ++ pinecone/admin/eraser/resources/backup.py | 30 ++ pinecone/admin/eraser/resources/collection.py | 23 ++ pinecone/admin/eraser/resources/index.py | 23 ++ pinecone/admin/eraser/retry_counter.py | 15 + pinecone/admin/project_eraser.py | 263 ------------------ pinecone/admin/resources/project.py | 9 +- 10 files changed, 378 insertions(+), 267 deletions(-) create mode 100644 pinecone/admin/eraser/__init__.py create mode 100644 pinecone/admin/eraser/project_eraser.py create mode 100644 pinecone/admin/eraser/resources/__init__.py create mode 100644 pinecone/admin/eraser/resources/_deleteable_resource.py create mode 100644 pinecone/admin/eraser/resources/backup.py create mode 100644 pinecone/admin/eraser/resources/collection.py create mode 100644 pinecone/admin/eraser/resources/index.py create mode 100644 pinecone/admin/eraser/retry_counter.py delete mode 100644 pinecone/admin/project_eraser.py diff --git a/pinecone/admin/eraser/__init__.py b/pinecone/admin/eraser/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pinecone/admin/eraser/project_eraser.py b/pinecone/admin/eraser/project_eraser.py new file mode 100644 index 
00000000..90ea19d0 --- /dev/null +++ b/pinecone/admin/eraser/project_eraser.py @@ -0,0 +1,248 @@ +import logging +from pinecone import Pinecone, NotFoundException +import time +from collections import deque +from .resources import ( + _DeleteableResource, + _DeleteableIndex, + _DeleteableCollection, + _DeleteableBackup, +) +from typing import NamedTuple +from .retry_counter import _RetryCounter + +logger = logging.getLogger(__name__) + + +class DeletionFailure(NamedTuple): + resource_type: str + resource_name: str + reason: str + + +class _ProjectEraser: + """ + This class is used to delete all resources within a project + """ + + def __init__(self, api_key, max_retries=5, sleep_interval=0.5): + self.api_key = api_key + self.pc = Pinecone(api_key=api_key) + + # In situations where there are a lot of resources, we want to + # slow down the rate of requests just to avoid any concerns about + # rate limits + self.sleep_interval = sleep_interval + self.undeleteable_resources = [] + self.max_retries = max_retries + + self.state_check_retries = _RetryCounter(self.max_retries) + self.failed_delete_retries = _RetryCounter(self.max_retries) + self.is_deletable_retries = _RetryCounter(self.max_retries) + self.is_terminating_retries = _RetryCounter(self.max_retries * 3) + + self.undeletable_resources = [] + + def _get_state(self, dr, resource, delete_queue): + should_continue = False + label = f"{dr.name()} {resource.name}" + try: + self.state_check_retries.increment(resource.name) + state = dr.get_state(name=resource.name) + logger.debug(f"{label} has state {state}") + should_continue = False + except NotFoundException: + logger.debug(f"{label} has already been deleted, continuing") + should_continue = True + except Exception as e: + if self.state_check_retries.is_maxed_out(resource.name): + logger.error(f"Error describing {label}: {e}") + self.undeletable_resources.append( + DeletionFailure( + resource_type=dr.name(), + resource_name=resource.name, + reason=f"Error 
describing {label}: {e}", + ) + ) + should_continue = True + else: + logger.debug(f"{label} has been returned to the back of the delete queue") + delete_queue.append(resource) + should_continue = True + + return (state, should_continue) + + def _check_if_terminating(self, state, dr, resource, delete_queue): + should_continue = False + terminating_states = ["Terminating", "Terminated"] + label = f"{dr.name()} {resource.name}" + if state in terminating_states: + self.is_terminating_retries.increment(resource.name) + if self.is_terminating_retries.is_maxed_out(resource.name): + logger.error(f"{label} has been in the terminating state for too long, skipping") + self.undeletable_resources.append( + DeletionFailure( + resource_type=dr.name(), + resource_name=resource.name, + reason=f"{label} has been in the terminating state for too long", + ) + ) + should_continue = True + else: + logger.debug( + f"{label} is in the process of being deleted, adding to the back of the delete queue to recheck later" + ) + delete_queue.append(resource) + should_continue = True + + return should_continue + + def _check_if_deletable(self, state, dr, resource, delete_queue): + should_continue = False + label = f"{dr.name()} {resource.name}" + deleteable_states = ["Ready", "InitializationFailed"] + if state not in deleteable_states: + self.is_deletable_retries.increment(resource.name) + if self.is_deletable_retries.is_maxed_out(resource.name): + attempts = self.is_deletable_retries.get_count(resource.name) + logger.error( + f"{label} did not enter a deleteable state after {attempts} attempts, skipping" + ) + self.undeletable_resources.append( + DeletionFailure( + resource_type=dr.name(), + resource_name=resource.name, + reason=f"Not in a deleteable state after {attempts} attempts", + ) + ) + should_continue = True + else: + logger.debug( + f"{label} state {state} is not deleteable, adding to the back of the delete queue" + ) + delete_queue.append(resource) + should_continue = True + + return 
should_continue + + def _attempt_delete(self, dr, resource, delete_queue): + should_continue = False + label = f"{dr.name()} {resource.name}" + try: + logger.debug(f"Attempting deleting of {label}") + dr.delete(name=resource.name) + logger.debug(f"Successfully deleted {label}") + except Exception as e: + logger.error(f"Error deleting {label}: {e}") + self.failed_delete_retries.increment(resource.name) + if self.failed_delete_retries.is_maxed_out(resource.name): + attempts = self.failed_delete_retries.get_count(resource.name) + logger.error(f"Failed to delete {label} after {attempts} attempts, skipping") + self.undeletable_resources.append( + DeletionFailure( + resource_type=dr.name(), + resource_name=resource.name, + reason=f"Failed to delete after {attempts} attempts", + ) + ) + should_continue = True + else: + logger.debug(f"{label} has been returned to the back of the delete queue") + delete_queue.append(resource) + should_continue = True + + return should_continue + + def _log_final_state(self, dr): + if len(self.undeletable_resources) > 0: + logger.error( + f"There were {len(self.undeletable_resources)} {dr.name_plural()} that were not deleted" + ) + for item in self.undeletable_resources: + logger.error( + f"{item.resource_type} {item.resource_name} was not deleted because {item.reason}" + ) + self.undeleteable_resources.append(item) + else: + logger.debug(f"All {dr.name_plural()} were deleted successfully") + + def _delete_resource_type(self, dr: _DeleteableResource): + delete_queue = deque(dr.list()) + if len(delete_queue) == 0: + logger.debug(f"No {dr.name_plural()} to delete") + return + + while len(delete_queue) > 0: + logger.debug(f"There are {len(delete_queue)} {dr.name_plural()} left to delete") + time.sleep(self.sleep_interval) + resource = delete_queue.popleft() + label = f"{dr.name()} {resource.name}" + + logger.debug(f"Processing {label}") + + # Get the latest description of the resource + state, should_continue = self._get_state(dr, resource, 
delete_queue) + if should_continue: + continue + + # If the resource is in the terminating state, add it to the back of the delete queue to recheck later + should_continue = self._check_if_terminating(state, dr, resource, delete_queue) + if should_continue: + continue + + # If the index is not in a deleteable state, add it to the back of the delete queue + should_continue = self._check_if_deletable(state, dr, resource, delete_queue) + if should_continue: + continue + + # If the resource is deletable, delete it + should_continue = self._attempt_delete(dr, resource, delete_queue) + if should_continue: + continue + + self._log_final_state(dr) + + def delete_all_indexes(self, force_delete=False): + index_list = self.pc.db.index.list() + index_with_deletion_protection = [ + index for index in index_list if index.deletion_protection == "enabled" + ] + if not force_delete and len(index_with_deletion_protection) > 0: + raise Exception( + f"Indexes with deletion protection enabled cannot be deleted: {[i.name for i in index_with_deletion_protection]}" + ) + + for index in index_with_deletion_protection: + logger.debug(f"Disabling deletion protection for Index {index.name}") + time.sleep(self.sleep_interval) + try: + self.pc.db.index.configure(name=index.name, deletion_protection="disabled") + except Exception as e: + logger.error(f"Error disabling deletion protection for Index {index.name}: {e}") + self.undeleteable_resources.append( + DeletionFailure( + resource_type="index", + resource_name=index.name, + reason=f"Failed to disable deletion protection: {e}", + ) + ) + + index_eraser = _DeleteableIndex(pc=self.pc) + return self._delete_resource_type(index_eraser) + + def delete_all_collections(self): + collection_eraser = _DeleteableCollection(pc=self.pc) + return self._delete_resource_type(collection_eraser) + + def delete_all_backups(self): + backup_eraser = _DeleteableBackup(pc=self.pc) + return self._delete_resource_type(backup_eraser) + + def retry_needed(self): + if 
len(self.undeleteable_resources) > 0: + logger.debug( + f"Retry needed for {len(self.undeleteable_resources)} undeleteable resources" + ) + return True + else: + return False diff --git a/pinecone/admin/eraser/resources/__init__.py b/pinecone/admin/eraser/resources/__init__.py new file mode 100644 index 00000000..c2ae61b4 --- /dev/null +++ b/pinecone/admin/eraser/resources/__init__.py @@ -0,0 +1,6 @@ +from ._deleteable_resource import _DeleteableResource +from .backup import _DeleteableBackup +from .collection import _DeleteableCollection +from .index import _DeleteableIndex + +__all__ = ["_DeleteableResource", "_DeleteableBackup", "_DeleteableCollection", "_DeleteableIndex"] diff --git a/pinecone/admin/eraser/resources/_deleteable_resource.py b/pinecone/admin/eraser/resources/_deleteable_resource.py new file mode 100644 index 00000000..06d06a99 --- /dev/null +++ b/pinecone/admin/eraser/resources/_deleteable_resource.py @@ -0,0 +1,28 @@ +import abc + + +class _DeleteableResource(abc.ABC): + @abc.abstractmethod + def name(self): + """Get the singular form of the resource name""" + pass + + @abc.abstractmethod + def name_plural(self): + """Get the plural form of the resource name""" + pass + + @abc.abstractmethod + def delete(self, name): + """Delete the resource with the given name""" + pass + + @abc.abstractmethod + def get_state(self, name): + """Get the state of the resource with the given name""" + pass + + @abc.abstractmethod + def list(self): + """List all resources""" + pass diff --git a/pinecone/admin/eraser/resources/backup.py b/pinecone/admin/eraser/resources/backup.py new file mode 100644 index 00000000..12b8c4c6 --- /dev/null +++ b/pinecone/admin/eraser/resources/backup.py @@ -0,0 +1,30 @@ +from ._deleteable_resource import _DeleteableResource +from pinecone import Pinecone + + +class _DeleteableBackup(_DeleteableResource): + def __init__(self, pc: Pinecone): + self.pc = pc + + def name(self): + return "backup" + + def name_plural(self): + return 
"backups" + + def delete(self, name): + backup = self._get_backup_by_name(name) + return self.pc.db.backup.delete(backup_id=backup.backup_id) + + def get_state(self, name): + backup = self._get_backup_by_name(name) + return backup.status + + def list(self): + return self.pc.db.backup.list() + + def _get_backup_by_name(self, name): + for backup in self.pc.db.backup.list(): + if backup.name == name: + return backup + raise Exception(f"Backup {name} not found") diff --git a/pinecone/admin/eraser/resources/collection.py b/pinecone/admin/eraser/resources/collection.py new file mode 100644 index 00000000..419d562a --- /dev/null +++ b/pinecone/admin/eraser/resources/collection.py @@ -0,0 +1,23 @@ +from ._deleteable_resource import _DeleteableResource +from pinecone import Pinecone + + +class _DeleteableCollection(_DeleteableResource): + def __init__(self, pc: Pinecone): + self.pc = pc + + def name(self): + return "collection" + + def name_plural(self): + return "collections" + + def get_state(self, name): + desc = self.pc.db.collection.describe(name=name) + return desc["status"] + + def delete(self, name): + return self.pc.db.collection.delete(name=name) + + def list(self): + return self.pc.db.collection.list() diff --git a/pinecone/admin/eraser/resources/index.py b/pinecone/admin/eraser/resources/index.py new file mode 100644 index 00000000..53d5add2 --- /dev/null +++ b/pinecone/admin/eraser/resources/index.py @@ -0,0 +1,23 @@ +from pinecone import Pinecone +from ._deleteable_resource import _DeleteableResource + + +class _DeleteableIndex(_DeleteableResource): + def __init__(self, pc: Pinecone): + self.pc = pc + + def name(self): + return "index" + + def name_plural(self): + return "indexes" + + def delete(self, name): + return self.pc.db.index.delete(name=name) + + def get_state(self, name): + desc = self.pc.db.index.describe(name=name) + return desc["status"]["state"] + + def list(self): + return self.pc.db.index.list() diff --git 
a/pinecone/admin/eraser/retry_counter.py b/pinecone/admin/eraser/retry_counter.py new file mode 100644 index 00000000..e5da4721 --- /dev/null +++ b/pinecone/admin/eraser/retry_counter.py @@ -0,0 +1,15 @@ +class _RetryCounter: + def __init__(self, max_retries): + self.max_retries = max_retries + self.counts = {} + + def increment(self, key): + if key not in self.counts: + self.counts[key] = 0 + self.counts[key] += 1 + + def get_count(self, key): + return self.counts.get(key, 0) + + def is_maxed_out(self, key): + return self.get_count(key) >= self.max_retries diff --git a/pinecone/admin/project_eraser.py b/pinecone/admin/project_eraser.py deleted file mode 100644 index cb4473d0..00000000 --- a/pinecone/admin/project_eraser.py +++ /dev/null @@ -1,263 +0,0 @@ -import logging -from pinecone import Pinecone, NotFoundException -import time -from collections import deque - -logger = logging.getLogger(__name__) - - -class _RetryCounter: - def __init__(self, max_retries): - self.max_retries = max_retries - self.counts = {} - - def increment(self, key): - if key not in self.counts: - self.counts[key] = 0 - self.counts[key] += 1 - - def get_count(self, key): - return self.counts.get(key, 0) - - def is_maxed_out(self, key): - return self.get_count(key) >= self.max_retries - - -class _ProjectEraser: - """ - This class is used to delete all resources within a project - """ - - def __init__(self, api_key, max_retries=5, sleep_interval=0.5): - self.pc = Pinecone(api_key=api_key) - - # In situations where there are a lot of resources, we want to - # slow down the rate of requests just to avoid any concerns about - # rate limits - self.sleep_interval = sleep_interval - self.undeleteable_resources = [] - self.max_retries = max_retries - - def _pluralize(self, resource_name): - if resource_name.lower() == "index": - return resource_name + "es" - else: - return resource_name + "s" - - def _delete_all_of_resource(self, resource_name, list_func, delete_func, get_state_func): - 
resources_to_delete = deque(list_func()) - if len(resources_to_delete) == 0: - logger.info(f"No {self._pluralize(resource_name)} to delete") - return - - state_check_retries = _RetryCounter(self.max_retries) - failed_delete_retries = _RetryCounter(self.max_retries) - is_deletable_retries = _RetryCounter(self.max_retries) - is_terminating_retries = _RetryCounter(self.max_retries * 3) - - undeletable_resources = [] - - while len(resources_to_delete) > 0: - logger.info( - f"There are {len(resources_to_delete)} {self._pluralize(resource_name)} left to delete" - ) - time.sleep(self.sleep_interval) - - resource = resources_to_delete.popleft() - logger.info(f"Processing {resource_name} {resource.name}") - - # Get the latest description of the resource - try: - state_check_retries.increment(resource.name) - state = get_state_func(name=resource.name) - logger.info(f"{resource_name} {resource.name} has state {state}") - except NotFoundException: - logger.info(f"{resource_name} {resource.name} has already been deleted, continuing") - continue - except Exception as e: - if state_check_retries.is_maxed_out(resource.name): - logger.error(f"Error describing {resource_name} {resource.name}: {e}") - undeletable_resources.append( - { - "resource": resource, - "type": resource_name, - "reason": f"Error describing {resource_name} {resource.name}: {e}", - } - ) - continue - else: - logger.info( - f"{resource_name} {resource.name} has been returned to the back of the delete queue" - ) - resources_to_delete.append(resource) - continue - - if state == "Terminating" or state == "Terminated": - is_terminating_retries.increment(resource.name) - if is_terminating_retries.is_maxed_out(resource.name): - logger.error( - f"{resource_name} {resource.name} has been in the terminating state for too long, skipping" - ) - undeletable_resources.append( - { - "resource": resource, - "type": resource_name, - "reason": f"{resource_name} has been in the terminating state for too long", - } - ) - continue - 
else: - logger.info( - f"{resource_name} {resource.name} is in the process of being deleted, adding to the back of the delete queue to recheck later" - ) - resources_to_delete.append(resource) - continue - - # If the index is not in a deleteable state, add it to the back of the delete queue - deleteable_states = ["Ready", "InitializationFailed"] - if state not in deleteable_states: - is_deletable_retries.increment(resource.name) - if is_deletable_retries.is_maxed_out(resource.name): - attempts = is_deletable_retries.get_count(resource.name) - logger.error( - f"{resource_name} {resource.name} did not enter a deleteable state after {attempts} attempts, skipping" - ) - undeletable_resources.append( - { - "resource": resource, - "type": resource_name, - "reason": f"Not in a deleteable state after {attempts} attempts", - } - ) - continue - else: - logger.info( - f"{resource_name} {resource.name} state {state} is not deleteable, adding to the back of the delete queue" - ) - resources_to_delete.append(resource) - continue - - try: - logger.info(f"Attempting deleting of {resource_name} {resource.name}") - delete_func(name=resource.name) - logger.info(f"Successfully deleted {resource_name} {resource.name}") - except Exception as e: - logger.error(f"Error deleting {resource_name} {resource.name}: {e}") - failed_delete_retries.increment(resource.name) - if failed_delete_retries.is_maxed_out(resource.name): - attempts = failed_delete_retries.get_count(resource.name) - logger.error( - f"Failed to delete {resource_name} {resource.name} after {attempts} attempts, skipping" - ) - undeletable_resources.append( - { - "resource": resource, - "type": resource_name, - "reason": f"Failed to delete after {attempts} attempts", - } - ) - continue - else: - logger.info( - f"{resource_name} {resource.name} has been returned to the back of the delete queue" - ) - resources_to_delete.append(resource) - continue - - if len(undeletable_resources) > 0: - logger.error( - f"There were 
{len(undeletable_resources)} {self._pluralize(resource_name)} that were not deleted" - ) - for item in undeletable_resources: - logger.error( - f"{resource_name} {item['resource'].name} was not deleted because {item['reason']}" - ) - self.undeleteable_resources.append(item) - else: - logger.info(f"All {self._pluralize(resource_name)} were deleted successfully") - - def delete_all_indexes(self, force_delete=False): - index_list = self.pc.db.index.list() - if len(index_list) == 0: - logger.info("No indexes to delete") - return - - index_with_deletion_protection = [ - index for index in index_list if index.deletion_protection == "enabled" - ] - if not force_delete and len(index_with_deletion_protection) > 0: - logger.info( - "There are indexes with deletion protection enabled. You must disable deletion protection before the index can be deleted." - ) - raise Exception( - f"Indexes with deletion protection enabled cannot be deleted: {[i.name for i in index_with_deletion_protection]}" - ) - - for index in index_with_deletion_protection: - logger.info(f"Disabling deletion protection for Index {index.name}") - time.sleep(self.sleep_interval) - try: - self.pc.db.index.configure(name=index.name, deletion_protection="disabled") - except Exception as e: - logger.error(f"Error disabling deletion protection for Index {index.name}: {e}") - self.undeleteable_resources.append( - { - "resource": index, - "type": "index", - "reason": f"Failed to disable deletion protection: {e}", - } - ) - - def get_state_func(name): - desc = self.pc.db.index.describe(name=name) - return desc.status.state - - return self._delete_all_of_resource( - resource_name="index", - list_func=self.pc.db.index.list, - delete_func=self.pc.db.index.delete, - get_state_func=get_state_func, - ) - - def delete_all_collections(self): - def get_state_func(name): - desc = self.pc.db.collection.describe(name=name) - return desc["status"] - - return self._delete_all_of_resource( - resource_name="collection", - 
list_func=self.pc.db.collection.list, - delete_func=self.pc.db.collection.delete, - get_state_func=get_state_func, - ) - - def delete_all_backups(self): - def _get_backup_by_name(name): - for backup in self.pc.db.backup.list(): - if backup.name == name: - return backup - raise Exception(f"Backup {name} not found") - - def delete_func(name): - backup = _get_backup_by_name(name) - return self.pc.db.backup.delete(backup_id=backup.backup_id) - - def get_state_func(name): - backup = _get_backup_by_name(name) - return backup.status - - return self._delete_all_of_resource( - resource_name="backup", - list_func=self.pc.db.backup.list, - delete_func=delete_func, - get_state_func=get_state_func, - ) - - def retry_needed(self): - if len(self.undeleteable_resources) > 0: - logger.info( - f"Retry needed for {len(self.undeleteable_resources)} undeleteable resources" - ) - return True - else: - return False diff --git a/pinecone/admin/resources/project.py b/pinecone/admin/resources/project.py index cc7c8ca7..0f274df2 100644 --- a/pinecone/admin/resources/project.py +++ b/pinecone/admin/resources/project.py @@ -466,14 +466,14 @@ def delete( from .api_key import ApiKeyResource api_key_resource = ApiKeyResource(self._api_client) - logger.info(f"Creating API key 'cleanup-key' for project {project.id}") + logger.debug(f"Creating API key 'cleanup-key' for project {project.id}") key_create_response = api_key_resource.create( project_id=project.id, name="cleanup-key", roles=["ProjectEditor"] ) api_key = key_create_response.value try: - from ..project_eraser import _ProjectEraser + from ..eraser.project_eraser import _ProjectEraser done = False retries = 0 @@ -491,12 +491,13 @@ def delete( done = not project_eraser.retry_needed() retries += 1 if not done: - logger.info( + logger.debug( f"Retrying deletion of resources for project {project.id}. 
There were {len(project_eraser.undeleteable_resources)} undeleteable resources" ) time.sleep(30) finally: - logger.info(f"Deleting API key 'cleanup-key' for project {project.id}") + logger.debug(f"Deleting API key 'cleanup-key' for project {project.id}") api_key_resource.delete(api_key_id=key_create_response.key.id) + logger.info(f"Deleting project {project_id}") return self._projects_api.delete_project(project_id=project_id) From 85dcc10b48114606f8fd5fd118df55f1a92f005e Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 13 Jun 2025 12:32:19 -0400 Subject: [PATCH 6/7] Fix CI --- .../cleanup-all/cleanup-test-projects.py | 2 +- .../actions/project-delete/delete-project.py | 2 +- pinecone/admin/eraser/project_eraser.py | 96 +++++++++---------- 3 files changed, 49 insertions(+), 51 deletions(-) diff --git a/.github/actions/cleanup-all/cleanup-test-projects.py b/.github/actions/cleanup-all/cleanup-test-projects.py index bbf926a6..951b5039 100644 --- a/.github/actions/cleanup-all/cleanup-test-projects.py +++ b/.github/actions/cleanup-all/cleanup-test-projects.py @@ -1,6 +1,6 @@ import logging from pinecone import Admin -from pinecone.admin.project_eraser import _ProjectEraser +from pinecone.admin.eraser.project_eraser import _ProjectEraser import time import os diff --git a/.github/actions/project-delete/delete-project.py b/.github/actions/project-delete/delete-project.py index 8897e1d2..5bbc44e3 100644 --- a/.github/actions/project-delete/delete-project.py +++ b/.github/actions/project-delete/delete-project.py @@ -2,7 +2,7 @@ import logging import time from pinecone import Admin -from pinecone.admin.project_eraser import _ProjectEraser +from pinecone.admin.eraser.project_eraser import _ProjectEraser logging.basicConfig( level=logging.DEBUG, format="%(levelname)-8s | %(name)s:%(lineno)d | %(message)s" diff --git a/pinecone/admin/eraser/project_eraser.py b/pinecone/admin/eraser/project_eraser.py index 90ea19d0..2958bb05 100644 --- 
a/pinecone/admin/eraser/project_eraser.py +++ b/pinecone/admin/eraser/project_eraser.py @@ -23,6 +23,8 @@ class DeletionFailure(NamedTuple): class _ProjectEraser: """ This class is used to delete all resources within a project + + It should not be used directly, but rather through :func:`pinecone.admin.resources.ProjectResource.delete` """ def __init__(self, api_key, max_retries=5, sleep_interval=0.5): @@ -73,68 +75,70 @@ def _get_state(self, dr, resource, delete_queue): return (state, should_continue) def _check_if_terminating(self, state, dr, resource, delete_queue): - should_continue = False terminating_states = ["Terminating", "Terminated"] label = f"{dr.name()} {resource.name}" - if state in terminating_states: - self.is_terminating_retries.increment(resource.name) - if self.is_terminating_retries.is_maxed_out(resource.name): - logger.error(f"{label} has been in the terminating state for too long, skipping") - self.undeletable_resources.append( - DeletionFailure( - resource_type=dr.name(), - resource_name=resource.name, - reason=f"{label} has been in the terminating state for too long", - ) - ) - should_continue = True - else: - logger.debug( - f"{label} is in the process of being deleted, adding to the back of the delete queue to recheck later" + + if state not in terminating_states: + return False + + self.is_terminating_retries.increment(resource.name) + if self.is_terminating_retries.is_maxed_out(resource.name): + logger.error(f"{label} has been in the terminating state for too long, skipping") + self.undeletable_resources.append( + DeletionFailure( + resource_type=dr.name(), + resource_name=resource.name, + reason=f"{label} has been in the terminating state for too long", ) - delete_queue.append(resource) - should_continue = True + ) + else: + logger.debug( + f"{label} is in the process of being deleted, adding to the back of the delete queue to recheck later" + ) + delete_queue.append(resource) - return should_continue + return True def 
_check_if_deletable(self, state, dr, resource, delete_queue): - should_continue = False label = f"{dr.name()} {resource.name}" deleteable_states = ["Ready", "InitializationFailed"] - if state not in deleteable_states: - self.is_deletable_retries.increment(resource.name) - if self.is_deletable_retries.is_maxed_out(resource.name): - attempts = self.is_deletable_retries.get_count(resource.name) - logger.error( - f"{label} did not enter a deleteable state after {attempts} attempts, skipping" - ) - self.undeletable_resources.append( - DeletionFailure( - resource_type=dr.name(), - resource_name=resource.name, - reason=f"Not in a deleteable state after {attempts} attempts", - ) - ) - should_continue = True - else: - logger.debug( - f"{label} state {state} is not deleteable, adding to the back of the delete queue" + + if state in deleteable_states: + return False + + self.is_deletable_retries.increment(resource.name) + if self.is_deletable_retries.is_maxed_out(resource.name): + attempts = self.is_deletable_retries.get_count(resource.name) + logger.error( + f"{label} did not enter a deleteable state after {attempts} attempts, skipping" + ) + self.undeletable_resources.append( + DeletionFailure( + resource_type=dr.name(), + resource_name=resource.name, + reason=f"Not in a deleteable state after {attempts} attempts", ) - delete_queue.append(resource) - should_continue = True + ) + else: + logger.debug( + f"{label} state {state} is not deleteable, adding to the back of the delete queue" + ) + delete_queue.append(resource) - return should_continue + return True def _attempt_delete(self, dr, resource, delete_queue): - should_continue = False label = f"{dr.name()} {resource.name}" try: logger.debug(f"Attempting deleting of {label}") dr.delete(name=resource.name) logger.debug(f"Successfully deleted {label}") + except NotFoundException: + logger.debug(f"{label} has already been deleted, continuing") except Exception as e: logger.error(f"Error deleting {label}: {e}") 
self.failed_delete_retries.increment(resource.name) + if self.failed_delete_retries.is_maxed_out(resource.name): attempts = self.failed_delete_retries.get_count(resource.name) logger.error(f"Failed to delete {label} after {attempts} attempts, skipping") @@ -145,13 +149,9 @@ def _attempt_delete(self, dr, resource, delete_queue): reason=f"Failed to delete after {attempts} attempts", ) ) - should_continue = True else: logger.debug(f"{label} has been returned to the back of the delete queue") delete_queue.append(resource) - should_continue = True - - return should_continue def _log_final_state(self, dr): if len(self.undeletable_resources) > 0: @@ -196,9 +196,7 @@ def _delete_resource_type(self, dr: _DeleteableResource): continue # If the resource is deletable, delete it - should_continue = self._attempt_delete(dr, resource, delete_queue) - if should_continue: - continue + self._attempt_delete(dr, resource, delete_queue) self._log_final_state(dr) From a321e10c208ac1789e778eeb7b9213e552f925dd Mon Sep 17 00:00:00 2001 From: Jen Hamon Date: Fri, 13 Jun 2025 14:16:45 -0400 Subject: [PATCH 7/7] Combine import statements --- pinecone/admin/admin.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pinecone/admin/admin.py b/pinecone/admin/admin.py index b8827d0e..1f70fe0e 100644 --- a/pinecone/admin/admin.py +++ b/pinecone/admin/admin.py @@ -1,11 +1,10 @@ -from pinecone.config import OpenApiConfiguration +from pinecone.config import OpenApiConfiguration, Config from pinecone.openapi_support import ApiClient from pinecone.core.openapi.oauth import API_VERSION from pinecone.core.openapi.oauth.apis import OAuthApi from pinecone.core.openapi.oauth.models import TokenRequest from typing import Optional, Dict from pinecone.utils import get_user_agent -from pinecone.config import Config import os from copy import deepcopy