diff --git a/codegen/build-oas.sh b/codegen/build-oas.sh index e627b2d9e..3e06b1044 100755 --- a/codegen/build-oas.sh +++ b/codegen/build-oas.sh @@ -86,6 +86,123 @@ generate_client() { sed -i '' "s/bool, date, datetime, dict, float, int, list, str, none_type/bool, dict, float, int, list, str, none_type/g" "$file" done + # Fix invalid dict type annotations in return types and casts + # Replace {str: (bool, dict, float, int, list, str, none_type)} with Dict[str, Any] + find "${build_dir}" -name "*.py" | while IFS= read -r file; do + # Need to escape the braces and parentheses for sed + sed -i '' 's/{str: (bool, dict, float, int, list, str, none_type)}/Dict[str, Any]/g' "$file" + done + + # Remove globals() assignments from TYPE_CHECKING blocks + # These should only be in lazy_import() functions, not in TYPE_CHECKING blocks + find "${build_dir}" -name "*.py" | while IFS= read -r file; do + python3 < None: """Debug status :param value: The debug status, True or False. :type: bool """ - if hasattr(self, "_debug"): - previous_debug = self._debug - else: - previous_debug = None + previous_debug: Optional[bool] = getattr(self, "_debug", None) self._debug = value def enable_http_logging(): diff --git a/pinecone/core/openapi/admin/api/api_keys_api.py b/pinecone/core/openapi/admin/api/api_keys_api.py index e835e2793..13210a2a8 100644 --- a/pinecone/core/openapi/admin/api/api_keys_api.py +++ b/pinecone/core/openapi/admin/api/api_keys_api.py @@ -9,6 +9,11 @@ Contact: support@pinecone.io """ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, cast +from multiprocessing.pool import ApplyResult + from pinecone.openapi_support import ApiClient, AsyncioApiClient from pinecone.openapi_support.endpoint_utils import ( ExtraOpenApiKwargsTypedDict, @@ -48,7 +53,7 @@ def __create_api_key( create_api_key_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> APIKeyWithSecret | ApplyResult[APIKeyWithSecret]: """Create an 
API key # noqa: E501 Create a new API key for a project. Developers can use the API key to authenticate requests to Pinecone's Data Plane and Control Plane APIs. # noqa: E501 @@ -90,7 +95,9 @@ def __create_api_key( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["project_id"] = project_id kwargs["create_api_key_request"] = create_api_key_request - return self.call_with_http_info(**kwargs) + return cast( + APIKeyWithSecret | ApplyResult[APIKeyWithSecret], self.call_with_http_info(**kwargs) + ) self.create_api_key = _Endpoint( settings={ @@ -137,7 +144,7 @@ def __delete_api_key( api_key_id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> None: """Delete an API key # noqa: E501 Delete an API key from a project. # noqa: E501 @@ -177,7 +184,7 @@ def __delete_api_key( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["api_key_id"] = api_key_id - return self.call_with_http_info(**kwargs) + return cast(None, self.call_with_http_info(**kwargs)) self.delete_api_key = _Endpoint( settings={ @@ -216,7 +223,7 @@ def __fetch_api_key( api_key_id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> APIKey | ApplyResult[APIKey]: """Get API key details # noqa: E501 Get the details of an API key, excluding the API key secret. # noqa: E501 @@ -256,7 +263,7 @@ def __fetch_api_key( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["api_key_id"] = api_key_id - return self.call_with_http_info(**kwargs) + return cast(APIKey | ApplyResult[APIKey], self.call_with_http_info(**kwargs)) self.fetch_api_key = _Endpoint( settings={ @@ -295,7 +302,7 @@ def __list_project_api_keys( project_id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> ListApiKeysResponse | ApplyResult[ListApiKeysResponse]: """List API keys # noqa: E501 List all API keys in a project. 
# noqa: E501 @@ -335,7 +342,10 @@ def __list_project_api_keys( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["project_id"] = project_id - return self.call_with_http_info(**kwargs) + return cast( + ListApiKeysResponse | ApplyResult[ListApiKeysResponse], + self.call_with_http_info(**kwargs), + ) self.list_project_api_keys = _Endpoint( settings={ @@ -375,7 +385,7 @@ def __update_api_key( update_api_key_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> APIKey | ApplyResult[APIKey]: """Update an API key # noqa: E501 Update the name and roles of an API key. # noqa: E501 @@ -417,7 +427,7 @@ def __update_api_key( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["api_key_id"] = api_key_id kwargs["update_api_key_request"] = update_api_key_request - return self.call_with_http_info(**kwargs) + return cast(APIKey | ApplyResult[APIKey], self.call_with_http_info(**kwargs)) self.update_api_key = _Endpoint( settings={ @@ -473,7 +483,7 @@ def __init__(self, api_client=None) -> None: async def __create_api_key( self, project_id, create_api_key_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> APIKeyWithSecret: """Create an API key # noqa: E501 Create a new API key for a project. Developers can use the API key to authenticate requests to Pinecone's Data Plane and Control Plane APIs. 
# noqa: E501 @@ -508,7 +518,7 @@ async def __create_api_key( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["project_id"] = project_id kwargs["create_api_key_request"] = create_api_key_request - return await self.call_with_http_info(**kwargs) + return cast(APIKeyWithSecret, await self.call_with_http_info(**kwargs)) self.create_api_key = _AsyncioEndpoint( settings={ @@ -550,7 +560,9 @@ async def __create_api_key( callable=__create_api_key, ) - async def __delete_api_key(self, api_key_id, x_pinecone_api_version="2025-10", **kwargs): + async def __delete_api_key( + self, api_key_id, x_pinecone_api_version="2025-10", **kwargs + ) -> None: """Delete an API key # noqa: E501 Delete an API key from a project. # noqa: E501 @@ -583,7 +595,7 @@ async def __delete_api_key(self, api_key_id, x_pinecone_api_version="2025-10", * self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["api_key_id"] = api_key_id - return await self.call_with_http_info(**kwargs) + return cast(None, await self.call_with_http_info(**kwargs)) self.delete_api_key = _AsyncioEndpoint( settings={ @@ -617,7 +629,9 @@ async def __delete_api_key(self, api_key_id, x_pinecone_api_version="2025-10", * callable=__delete_api_key, ) - async def __fetch_api_key(self, api_key_id, x_pinecone_api_version="2025-10", **kwargs): + async def __fetch_api_key( + self, api_key_id, x_pinecone_api_version="2025-10", **kwargs + ) -> APIKey: """Get API key details # noqa: E501 Get the details of an API key, excluding the API key secret. 
# noqa: E501 @@ -650,7 +664,7 @@ async def __fetch_api_key(self, api_key_id, x_pinecone_api_version="2025-10", ** self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["api_key_id"] = api_key_id - return await self.call_with_http_info(**kwargs) + return cast(APIKey, await self.call_with_http_info(**kwargs)) self.fetch_api_key = _AsyncioEndpoint( settings={ @@ -686,7 +700,7 @@ async def __fetch_api_key(self, api_key_id, x_pinecone_api_version="2025-10", ** async def __list_project_api_keys( self, project_id, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> ListApiKeysResponse: """List API keys # noqa: E501 List all API keys in a project. # noqa: E501 @@ -719,7 +733,7 @@ async def __list_project_api_keys( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["project_id"] = project_id - return await self.call_with_http_info(**kwargs) + return cast(ListApiKeysResponse, await self.call_with_http_info(**kwargs)) self.list_project_api_keys = _AsyncioEndpoint( settings={ @@ -755,7 +769,7 @@ async def __list_project_api_keys( async def __update_api_key( self, api_key_id, update_api_key_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> APIKey: """Update an API key # noqa: E501 Update the name and roles of an API key. 
# noqa: E501 @@ -790,7 +804,7 @@ async def __update_api_key( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["api_key_id"] = api_key_id kwargs["update_api_key_request"] = update_api_key_request - return await self.call_with_http_info(**kwargs) + return cast(APIKey, await self.call_with_http_info(**kwargs)) self.update_api_key = _AsyncioEndpoint( settings={ diff --git a/pinecone/core/openapi/admin/api/organizations_api.py b/pinecone/core/openapi/admin/api/organizations_api.py index c3cca33c3..cdbc7a8d3 100644 --- a/pinecone/core/openapi/admin/api/organizations_api.py +++ b/pinecone/core/openapi/admin/api/organizations_api.py @@ -9,6 +9,11 @@ Contact: support@pinecone.io """ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, cast +from multiprocessing.pool import ApplyResult + from pinecone.openapi_support import ApiClient, AsyncioApiClient from pinecone.openapi_support.endpoint_utils import ( ExtraOpenApiKwargsTypedDict, @@ -45,7 +50,7 @@ def __delete_organization( organization_id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> None: """Delete an organization # noqa: E501 Delete an organization and all its associated configuration. Before deleting an organization, you must delete all projects (including indexes, assistants, backups, and collections) associated with the organization. 
# noqa: E501 @@ -85,7 +90,7 @@ def __delete_organization( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["organization_id"] = organization_id - return self.call_with_http_info(**kwargs) + return cast(None, self.call_with_http_info(**kwargs)) self.delete_organization = _Endpoint( settings={ @@ -124,7 +129,7 @@ def __fetch_organization( organization_id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> Organization | ApplyResult[Organization]: """Get organization details # noqa: E501 Get details about an organization. # noqa: E501 @@ -164,7 +169,9 @@ def __fetch_organization( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["organization_id"] = organization_id - return self.call_with_http_info(**kwargs) + return cast( + Organization | ApplyResult[Organization], self.call_with_http_info(**kwargs) + ) self.fetch_organization = _Endpoint( settings={ @@ -200,7 +207,7 @@ def __fetch_organization( def __list_organizations( self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> OrganizationList | ApplyResult[OrganizationList]: """List organizations # noqa: E501 List all organizations associated with an account. # noqa: E501 @@ -238,7 +245,9 @@ def __list_organizations( """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast( + OrganizationList | ApplyResult[OrganizationList], self.call_with_http_info(**kwargs) + ) self.list_organizations = _Endpoint( settings={ @@ -275,7 +284,7 @@ def __update_organization( update_organization_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> Organization | ApplyResult[Organization]: """Update an organization # noqa: E501 Update an organization's name. 
# noqa: E501 @@ -317,7 +326,9 @@ def __update_organization( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["organization_id"] = organization_id kwargs["update_organization_request"] = update_organization_request - return self.call_with_http_info(**kwargs) + return cast( + Organization | ApplyResult[Organization], self.call_with_http_info(**kwargs) + ) self.update_organization = _Endpoint( settings={ @@ -377,7 +388,7 @@ def __init__(self, api_client=None) -> None: async def __delete_organization( self, organization_id, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> None: """Delete an organization # noqa: E501 Delete an organization and all its associated configuration. Before deleting an organization, you must delete all projects (including indexes, assistants, backups, and collections) associated with the organization. # noqa: E501 @@ -410,7 +421,7 @@ async def __delete_organization( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["organization_id"] = organization_id - return await self.call_with_http_info(**kwargs) + return cast(None, await self.call_with_http_info(**kwargs)) self.delete_organization = _AsyncioEndpoint( settings={ @@ -446,7 +457,7 @@ async def __delete_organization( async def __fetch_organization( self, organization_id, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> Organization: """Get organization details # noqa: E501 Get details about an organization. 
# noqa: E501 @@ -479,7 +490,7 @@ async def __fetch_organization( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["organization_id"] = organization_id - return await self.call_with_http_info(**kwargs) + return cast(Organization, await self.call_with_http_info(**kwargs)) self.fetch_organization = _AsyncioEndpoint( settings={ @@ -513,7 +524,9 @@ async def __fetch_organization( callable=__fetch_organization, ) - async def __list_organizations(self, x_pinecone_api_version="2025-10", **kwargs): + async def __list_organizations( + self, x_pinecone_api_version="2025-10", **kwargs + ) -> OrganizationList: """List organizations # noqa: E501 List all organizations associated with an account. # noqa: E501 @@ -544,7 +557,7 @@ async def __list_organizations(self, x_pinecone_api_version="2025-10", **kwargs) """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(OrganizationList, await self.call_with_http_info(**kwargs)) self.list_organizations = _AsyncioEndpoint( settings={ @@ -581,7 +594,7 @@ async def __update_organization( update_organization_request, x_pinecone_api_version="2025-10", **kwargs, - ): + ) -> Organization: """Update an organization # noqa: E501 Update an organization's name. 
# noqa: E501 @@ -616,7 +629,7 @@ async def __update_organization( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["organization_id"] = organization_id kwargs["update_organization_request"] = update_organization_request - return await self.call_with_http_info(**kwargs) + return cast(Organization, await self.call_with_http_info(**kwargs)) self.update_organization = _AsyncioEndpoint( settings={ diff --git a/pinecone/core/openapi/admin/api/projects_api.py b/pinecone/core/openapi/admin/api/projects_api.py index ee2a9be6a..1d1849ddf 100644 --- a/pinecone/core/openapi/admin/api/projects_api.py +++ b/pinecone/core/openapi/admin/api/projects_api.py @@ -9,6 +9,11 @@ Contact: support@pinecone.io """ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, cast +from multiprocessing.pool import ApplyResult + from pinecone.openapi_support import ApiClient, AsyncioApiClient from pinecone.openapi_support.endpoint_utils import ( ExtraOpenApiKwargsTypedDict, @@ -46,7 +51,7 @@ def __create_project( create_project_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> Project | ApplyResult[Project]: """Create a new project # noqa: E501 Creates a new project. # noqa: E501 @@ -86,7 +91,7 @@ def __create_project( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["create_project_request"] = create_project_request - return self.call_with_http_info(**kwargs) + return cast(Project | ApplyResult[Project], self.call_with_http_info(**kwargs)) self.create_project = _Endpoint( settings={ @@ -128,7 +133,7 @@ def __delete_project( project_id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> None: """Delete a project # noqa: E501 Delete a project and all its associated configuration. Before deleting a project, you must delete all indexes, assistants, backups, and collections associated with the project. 
Other project resources, such as API keys, are automatically deleted when the project is deleted. # noqa: E501 @@ -168,7 +173,7 @@ def __delete_project( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["project_id"] = project_id - return self.call_with_http_info(**kwargs) + return cast(None, self.call_with_http_info(**kwargs)) self.delete_project = _Endpoint( settings={ @@ -207,7 +212,7 @@ def __fetch_project( project_id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> Project | ApplyResult[Project]: """Get project details # noqa: E501 Get details about a project. # noqa: E501 @@ -247,7 +252,7 @@ def __fetch_project( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["project_id"] = project_id - return self.call_with_http_info(**kwargs) + return cast(Project | ApplyResult[Project], self.call_with_http_info(**kwargs)) self.fetch_project = _Endpoint( settings={ @@ -283,7 +288,7 @@ def __fetch_project( def __list_projects( self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> ProjectList | ApplyResult[ProjectList]: """List projects # noqa: E501 List all projects in an organization. # noqa: E501 @@ -321,7 +326,7 @@ def __list_projects( """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast(ProjectList | ApplyResult[ProjectList], self.call_with_http_info(**kwargs)) self.list_projects = _Endpoint( settings={ @@ -358,7 +363,7 @@ def __update_project( update_project_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> Project | ApplyResult[Project]: """Update a project # noqa: E501 Update a project's configuration details. 
You can update the project's name, maximum number of Pods, or enable encryption with a customer-managed encryption key (CMEK). # noqa: E501 @@ -400,7 +405,7 @@ def __update_project( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["project_id"] = project_id kwargs["update_project_request"] = update_project_request - return self.call_with_http_info(**kwargs) + return cast(Project | ApplyResult[Project], self.call_with_http_info(**kwargs)) self.update_project = _Endpoint( settings={ @@ -456,7 +461,7 @@ def __init__(self, api_client=None) -> None: async def __create_project( self, create_project_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> Project: """Create a new project # noqa: E501 Creates a new project. # noqa: E501 @@ -489,7 +494,7 @@ async def __create_project( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["create_project_request"] = create_project_request - return await self.call_with_http_info(**kwargs) + return cast(Project, await self.call_with_http_info(**kwargs)) self.create_project = _AsyncioEndpoint( settings={ @@ -526,7 +531,9 @@ async def __create_project( callable=__create_project, ) - async def __delete_project(self, project_id, x_pinecone_api_version="2025-10", **kwargs): + async def __delete_project( + self, project_id, x_pinecone_api_version="2025-10", **kwargs + ) -> None: """Delete a project # noqa: E501 Delete a project and all its associated configuration. Before deleting a project, you must delete all indexes, assistants, backups, and collections associated with the project. Other project resources, such as API keys, are automatically deleted when the project is deleted. 
# noqa: E501 @@ -559,7 +566,7 @@ async def __delete_project(self, project_id, x_pinecone_api_version="2025-10", * self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["project_id"] = project_id - return await self.call_with_http_info(**kwargs) + return cast(None, await self.call_with_http_info(**kwargs)) self.delete_project = _AsyncioEndpoint( settings={ @@ -593,7 +600,9 @@ async def __delete_project(self, project_id, x_pinecone_api_version="2025-10", * callable=__delete_project, ) - async def __fetch_project(self, project_id, x_pinecone_api_version="2025-10", **kwargs): + async def __fetch_project( + self, project_id, x_pinecone_api_version="2025-10", **kwargs + ) -> Project: """Get project details # noqa: E501 Get details about a project. # noqa: E501 @@ -626,7 +635,7 @@ async def __fetch_project(self, project_id, x_pinecone_api_version="2025-10", ** self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["project_id"] = project_id - return await self.call_with_http_info(**kwargs) + return cast(Project, await self.call_with_http_info(**kwargs)) self.fetch_project = _AsyncioEndpoint( settings={ @@ -660,7 +669,7 @@ async def __fetch_project(self, project_id, x_pinecone_api_version="2025-10", ** callable=__fetch_project, ) - async def __list_projects(self, x_pinecone_api_version="2025-10", **kwargs): + async def __list_projects(self, x_pinecone_api_version="2025-10", **kwargs) -> ProjectList: """List projects # noqa: E501 List all projects in an organization. 
# noqa: E501 @@ -691,7 +700,7 @@ async def __list_projects(self, x_pinecone_api_version="2025-10", **kwargs): """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(ProjectList, await self.call_with_http_info(**kwargs)) self.list_projects = _AsyncioEndpoint( settings={ @@ -724,7 +733,7 @@ async def __list_projects(self, x_pinecone_api_version="2025-10", **kwargs): async def __update_project( self, project_id, update_project_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> Project: """Update a project # noqa: E501 Update a project's configuration details. You can update the project's name, maximum number of Pods, or enable encryption with a customer-managed encryption key (CMEK). # noqa: E501 @@ -759,7 +768,7 @@ async def __update_project( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["project_id"] = project_id kwargs["update_project_request"] = update_project_request - return await self.call_with_http_info(**kwargs) + return cast(Project, await self.call_with_http_info(**kwargs)) self.update_project = _AsyncioEndpoint( settings={ diff --git a/pinecone/core/openapi/admin/model/api_key.py b/pinecone/core/openapi/admin/model/api_key.py index 33ad8554d..bb8f27137 100644 --- a/pinecone/core/openapi/admin/model/api_key.py +++ b/pinecone/core/openapi/admin/model/api_key.py @@ -105,6 +105,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of APIKey. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], id, name, project_id, roles, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/admin/model/api_key_with_secret.py b/pinecone/core/openapi/admin/model/api_key_with_secret.py index e74471a3e..5f4afa2ab 100644 --- a/pinecone/core/openapi/admin/model/api_key_with_secret.py +++ b/pinecone/core/openapi/admin/model/api_key_with_secret.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.admin.model.api_key import APIKey + def lazy_import(): from pinecone.core.openapi.admin.model.api_key import APIKey @@ -109,6 +114,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of APIKeyWithSecret. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], key, value, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/admin/model/create_api_key_request.py b/pinecone/core/openapi/admin/model/create_api_key_request.py index 5a88a0bcd..bc24a641b 100644 --- a/pinecone/core/openapi/admin/model/create_api_key_request.py +++ b/pinecone/core/openapi/admin/model/create_api_key_request.py @@ -103,6 +103,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateAPIKeyRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/admin/model/create_project_request.py b/pinecone/core/openapi/admin/model/create_project_request.py index e6f710c3f..b0574e27c 100644 --- a/pinecone/core/openapi/admin/model/create_project_request.py +++ b/pinecone/core/openapi/admin/model/create_project_request.py @@ -105,6 +105,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateProjectRequest. + + This method is overridden to provide proper type inference for mypy. 
+ The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/admin/model/error_response.py b/pinecone/core/openapi/admin/model/error_response.py index 062b3e6b4..e0684b5c8 100644 --- a/pinecone/core/openapi/admin/model/error_response.py +++ b/pinecone/core/openapi/admin/model/error_response.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.admin.model.error_response_error import ErrorResponseError + def lazy_import(): from pinecone.core.openapi.admin.model.error_response_error import ErrorResponseError @@ -109,6 +114,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ErrorResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/admin/model/error_response_error.py b/pinecone/core/openapi/admin/model/error_response_error.py index e83454ee0..6443ea723 100644 --- a/pinecone/core/openapi/admin/model/error_response_error.py +++ b/pinecone/core/openapi/admin/model/error_response_error.py @@ -86,7 +86,7 @@ def openapi_types(cls): return { "code": (str,), # noqa: E501 "message": (str,), # noqa: E501 - "details": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "details": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -103,6 +103,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ErrorResponseError. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # noqa: E501 @@ -143,7 +154,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. 
[optional] # noqa: E501 + details (Dict[str, Any]): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -239,7 +250,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 + details (Dict[str, Any]): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/admin/model/list_api_keys_response.py b/pinecone/core/openapi/admin/model/list_api_keys_response.py index dcda7c011..3b83213f8 100644 --- a/pinecone/core/openapi/admin/model/list_api_keys_response.py +++ b/pinecone/core/openapi/admin/model/list_api_keys_response.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.admin.model.api_key import APIKey + def lazy_import(): from pinecone.core.openapi.admin.model.api_key import APIKey @@ -107,6 +112,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ListApiKeysResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/admin/model/organization.py b/pinecone/core/openapi/admin/model/organization.py index 63e3da5b3..2b2fdaa78 100644 --- a/pinecone/core/openapi/admin/model/organization.py +++ b/pinecone/core/openapi/admin/model/organization.py @@ -111,6 +111,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of Organization. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data( diff --git a/pinecone/core/openapi/admin/model/organization_list.py b/pinecone/core/openapi/admin/model/organization_list.py index 49a6846a6..ad7141554 100644 --- a/pinecone/core/openapi/admin/model/organization_list.py +++ b/pinecone/core/openapi/admin/model/organization_list.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.admin.model.organization import Organization + def lazy_import(): from pinecone.core.openapi.admin.model.organization import Organization @@ -107,6 +112,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of 
OrganizationList. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/admin/model/project.py b/pinecone/core/openapi/admin/model/project.py index 2fc158e0f..7a641d12a 100644 --- a/pinecone/core/openapi/admin/model/project.py +++ b/pinecone/core/openapi/admin/model/project.py @@ -111,6 +111,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of Project. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data( diff --git a/pinecone/core/openapi/admin/model/project_list.py b/pinecone/core/openapi/admin/model/project_list.py index 2d06bc505..4811ef4d0 100644 --- a/pinecone/core/openapi/admin/model/project_list.py +++ b/pinecone/core/openapi/admin/model/project_list.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.admin.model.project import Project + def lazy_import(): from pinecone.core.openapi.admin.model.project import Project @@ -107,6 +112,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ProjectList. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/admin/model/update_api_key_request.py b/pinecone/core/openapi/admin/model/update_api_key_request.py index 68d0cea83..101164ce4 100644 --- a/pinecone/core/openapi/admin/model/update_api_key_request.py +++ b/pinecone/core/openapi/admin/model/update_api_key_request.py @@ -103,6 +103,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of UpdateAPIKeyRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/admin/model/update_organization_request.py b/pinecone/core/openapi/admin/model/update_organization_request.py index ce0095cd3..a537961b0 100644 --- a/pinecone/core/openapi/admin/model/update_organization_request.py +++ b/pinecone/core/openapi/admin/model/update_organization_request.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of UpdateOrganizationRequest. + + This method is overridden to provide proper type inference for mypy. 
+ The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/admin/model/update_project_request.py b/pinecone/core/openapi/admin/model/update_project_request.py index 20e8ae2a1..0cec6bc2d 100644 --- a/pinecone/core/openapi/admin/model/update_project_request.py +++ b/pinecone/core/openapi/admin/model/update_project_request.py @@ -105,6 +105,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of UpdateProjectRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/api/manage_indexes_api.py b/pinecone/core/openapi/db_control/api/manage_indexes_api.py index c4e75a45b..8190a4559 100644 --- a/pinecone/core/openapi/db_control/api/manage_indexes_api.py +++ b/pinecone/core/openapi/db_control/api/manage_indexes_api.py @@ -9,6 +9,11 @@ Contact: support@pinecone.io """ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, cast +from multiprocessing.pool import ApplyResult + from pinecone.openapi_support import ApiClient, AsyncioApiClient from pinecone.openapi_support.endpoint_utils import ( ExtraOpenApiKwargsTypedDict, @@ -64,7 +69,7 @@ def __configure_index( configure_index_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> IndexModel | ApplyResult[IndexModel]: """Configure an index # noqa: E501 Configure an existing index. For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). 
# noqa: E501 @@ -106,7 +111,7 @@ def __configure_index( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["index_name"] = index_name kwargs["configure_index_request"] = configure_index_request - return self.call_with_http_info(**kwargs) + return cast(IndexModel | ApplyResult[IndexModel], self.call_with_http_info(**kwargs)) self.configure_index = _Endpoint( settings={ @@ -154,7 +159,7 @@ def __create_backup( create_backup_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> BackupModel | ApplyResult[BackupModel]: """Create a backup of an index # noqa: E501 Create a backup of an index. # noqa: E501 @@ -196,7 +201,7 @@ def __create_backup( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["index_name"] = index_name kwargs["create_backup_request"] = create_backup_request - return self.call_with_http_info(**kwargs) + return cast(BackupModel | ApplyResult[BackupModel], self.call_with_http_info(**kwargs)) self.create_backup = _Endpoint( settings={ @@ -243,7 +248,7 @@ def __create_collection( create_collection_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> CollectionModel | ApplyResult[CollectionModel]: """Create a collection # noqa: E501 Create a Pinecone collection. Serverless indexes do not support collections. 
# noqa: E501 @@ -283,7 +288,9 @@ def __create_collection( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["create_collection_request"] = create_collection_request - return self.call_with_http_info(**kwargs) + return cast( + CollectionModel | ApplyResult[CollectionModel], self.call_with_http_info(**kwargs) + ) self.create_collection = _Endpoint( settings={ @@ -325,7 +332,7 @@ def __create_index( create_index_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> IndexModel | ApplyResult[IndexModel]: """Create an index # noqa: E501 Create a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index). # noqa: E501 @@ -365,7 +372,7 @@ def __create_index( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["create_index_request"] = create_index_request - return self.call_with_http_info(**kwargs) + return cast(IndexModel | ApplyResult[IndexModel], self.call_with_http_info(**kwargs)) self.create_index = _Endpoint( settings={ @@ -407,7 +414,7 @@ def __create_index_for_model( create_index_for_model_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> IndexModel | ApplyResult[IndexModel]: """Create an index with integrated embedding # noqa: E501 Create an index with integrated embedding. 
With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-10/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-10/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index#integrated-embedding). # noqa: E501 @@ -447,7 +454,7 @@ def __create_index_for_model( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["create_index_for_model_request"] = create_index_for_model_request - return self.call_with_http_info(**kwargs) + return cast(IndexModel | ApplyResult[IndexModel], self.call_with_http_info(**kwargs)) self.create_index_for_model = _Endpoint( settings={ @@ -490,7 +497,7 @@ def __create_index_from_backup_operation( create_index_from_backup_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> CreateIndexFromBackupResponse | ApplyResult[CreateIndexFromBackupResponse]: """Create an index from a backup # noqa: E501 Create an index from a backup. # noqa: E501 @@ -532,7 +539,10 @@ def __create_index_from_backup_operation( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["backup_id"] = backup_id kwargs["create_index_from_backup_request"] = create_index_from_backup_request - return self.call_with_http_info(**kwargs) + return cast( + CreateIndexFromBackupResponse | ApplyResult[CreateIndexFromBackupResponse], + self.call_with_http_info(**kwargs), + ) self.create_index_from_backup_operation = _Endpoint( settings={ @@ -580,7 +590,7 @@ def __create_index_from_backup_operation( def __delete_backup( self, backup_id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> None: """Delete a backup # noqa: E501 Delete a backup. 
# noqa: E501 @@ -620,7 +630,7 @@ def __delete_backup( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["backup_id"] = backup_id - return self.call_with_http_info(**kwargs) + return cast(None, self.call_with_http_info(**kwargs)) self.delete_backup = _Endpoint( settings={ @@ -659,7 +669,7 @@ def __delete_collection( collection_name, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> None: """Delete a collection # noqa: E501 Delete an existing collection. Serverless indexes do not support collections. # noqa: E501 @@ -699,7 +709,7 @@ def __delete_collection( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["collection_name"] = collection_name - return self.call_with_http_info(**kwargs) + return cast(None, self.call_with_http_info(**kwargs)) self.delete_collection = _Endpoint( settings={ @@ -738,7 +748,7 @@ def __delete_index( index_name, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> None: """Delete an index # noqa: E501 Delete an existing index. # noqa: E501 @@ -778,7 +788,7 @@ def __delete_index( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["index_name"] = index_name - return self.call_with_http_info(**kwargs) + return cast(None, self.call_with_http_info(**kwargs)) self.delete_index = _Endpoint( settings={ @@ -814,7 +824,7 @@ def __delete_index( def __describe_backup( self, backup_id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> BackupModel | ApplyResult[BackupModel]: """Describe a backup # noqa: E501 Get a description of a backup. 
# noqa: E501 @@ -854,7 +864,7 @@ def __describe_backup( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["backup_id"] = backup_id - return self.call_with_http_info(**kwargs) + return cast(BackupModel | ApplyResult[BackupModel], self.call_with_http_info(**kwargs)) self.describe_backup = _Endpoint( settings={ @@ -893,7 +903,7 @@ def __describe_collection( collection_name, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> CollectionModel | ApplyResult[CollectionModel]: """Describe a collection # noqa: E501 Get a description of a collection. Serverless indexes do not support collections. # noqa: E501 @@ -933,7 +943,9 @@ def __describe_collection( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["collection_name"] = collection_name - return self.call_with_http_info(**kwargs) + return cast( + CollectionModel | ApplyResult[CollectionModel], self.call_with_http_info(**kwargs) + ) self.describe_collection = _Endpoint( settings={ @@ -972,7 +984,7 @@ def __describe_index( index_name, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> IndexModel | ApplyResult[IndexModel]: """Describe an index # noqa: E501 Get a description of an index. # noqa: E501 @@ -1012,7 +1024,7 @@ def __describe_index( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["index_name"] = index_name - return self.call_with_http_info(**kwargs) + return cast(IndexModel | ApplyResult[IndexModel], self.call_with_http_info(**kwargs)) self.describe_index = _Endpoint( settings={ @@ -1048,7 +1060,7 @@ def __describe_index( def __describe_restore_job( self, job_id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> RestoreJobModel | ApplyResult[RestoreJobModel]: """Describe a restore job # noqa: E501 Get a description of a restore job. 
# noqa: E501 @@ -1088,7 +1100,9 @@ def __describe_restore_job( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["job_id"] = job_id - return self.call_with_http_info(**kwargs) + return cast( + RestoreJobModel | ApplyResult[RestoreJobModel], self.call_with_http_info(**kwargs) + ) self.describe_restore_job = _Endpoint( settings={ @@ -1124,7 +1138,7 @@ def __describe_restore_job( def __list_collections( self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> CollectionList | ApplyResult[CollectionList]: """List collections # noqa: E501 List all collections in a project. Serverless indexes do not support collections. # noqa: E501 @@ -1162,7 +1176,9 @@ def __list_collections( """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast( + CollectionList | ApplyResult[CollectionList], self.call_with_http_info(**kwargs) + ) self.list_collections = _Endpoint( settings={ @@ -1198,7 +1214,7 @@ def __list_index_backups( index_name, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> BackupList | ApplyResult[BackupList]: """List backups for an index # noqa: E501 List all backups for an index. # noqa: E501 @@ -1240,7 +1256,7 @@ def __list_index_backups( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["index_name"] = index_name - return self.call_with_http_info(**kwargs) + return cast(BackupList | ApplyResult[BackupList], self.call_with_http_info(**kwargs)) self.list_index_backups = _Endpoint( settings={ @@ -1288,7 +1304,7 @@ def __list_index_backups( def __list_indexes( self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> IndexList | ApplyResult[IndexList]: """List indexes # noqa: E501 List all indexes in a project. 
# noqa: E501 @@ -1326,7 +1342,7 @@ def __list_indexes( """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast(IndexList | ApplyResult[IndexList], self.call_with_http_info(**kwargs)) self.list_indexes = _Endpoint( settings={ @@ -1359,7 +1375,7 @@ def __list_indexes( def __list_project_backups( self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> BackupList | ApplyResult[BackupList]: """List backups for all indexes in a project # noqa: E501 List all backups for a project. # noqa: E501 @@ -1399,7 +1415,7 @@ def __list_project_backups( """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast(BackupList | ApplyResult[BackupList], self.call_with_http_info(**kwargs)) self.list_project_backups = _Endpoint( settings={ @@ -1444,7 +1460,7 @@ def __list_project_backups( def __list_restore_jobs( self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> RestoreJobList | ApplyResult[RestoreJobList]: """List restore jobs # noqa: E501 List all restore jobs for a project. # noqa: E501 @@ -1484,7 +1500,9 @@ def __list_restore_jobs( """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast( + RestoreJobList | ApplyResult[RestoreJobList], self.call_with_http_info(**kwargs) + ) self.list_restore_jobs = _Endpoint( settings={ @@ -1541,7 +1559,7 @@ def __init__(self, api_client=None) -> None: async def __configure_index( self, index_name, configure_index_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> IndexModel: """Configure an index # noqa: E501 Configure an existing index. 
For serverless indexes, you can configure index deletion protection, tags, and integrated inference embedding settings for the index. For pod-based indexes, you can configure the pod size, number of replicas, tags, and index deletion protection. It is not possible to change the pod type of a pod-based index. However, you can create a collection from a pod-based index and then [create a new pod-based index with a different pod type](http://docs.pinecone.io/guides/indexes/pods/create-a-pod-based-index#create-a-pod-index-from-a-collection) from the collection. For guidance and examples, see [Configure an index](http://docs.pinecone.io/guides/indexes/pods/manage-pod-based-indexes). # noqa: E501 @@ -1576,7 +1594,7 @@ async def __configure_index( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["index_name"] = index_name kwargs["configure_index_request"] = configure_index_request - return await self.call_with_http_info(**kwargs) + return cast(IndexModel, await self.call_with_http_info(**kwargs)) self.configure_index = _AsyncioEndpoint( settings={ @@ -1620,7 +1638,7 @@ async def __configure_index( async def __create_backup( self, index_name, create_backup_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> BackupModel: """Create a backup of an index # noqa: E501 Create a backup of an index. # noqa: E501 @@ -1655,7 +1673,7 @@ async def __create_backup( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["index_name"] = index_name kwargs["create_backup_request"] = create_backup_request - return await self.call_with_http_info(**kwargs) + return cast(BackupModel, await self.call_with_http_info(**kwargs)) self.create_backup = _AsyncioEndpoint( settings={ @@ -1699,7 +1717,7 @@ async def __create_backup( async def __create_collection( self, create_collection_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> CollectionModel: """Create a collection # noqa: E501 Create a Pinecone collection. 
Serverless indexes do not support collections. # noqa: E501 @@ -1732,7 +1750,7 @@ async def __create_collection( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["create_collection_request"] = create_collection_request - return await self.call_with_http_info(**kwargs) + return cast(CollectionModel, await self.call_with_http_info(**kwargs)) self.create_collection = _AsyncioEndpoint( settings={ @@ -1771,7 +1789,7 @@ async def __create_collection( async def __create_index( self, create_index_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> IndexModel: """Create an index # noqa: E501 Create a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index). # noqa: E501 @@ -1804,7 +1822,7 @@ async def __create_index( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["create_index_request"] = create_index_request - return await self.call_with_http_info(**kwargs) + return cast(IndexModel, await self.call_with_http_info(**kwargs)) self.create_index = _AsyncioEndpoint( settings={ @@ -1843,7 +1861,7 @@ async def __create_index( async def __create_index_for_model( self, create_index_for_model_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> IndexModel: """Create an index with integrated embedding # noqa: E501 Create an index with integrated embedding. 
With this type of index, you provide source text, and Pinecone uses a [hosted embedding model](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) to convert the text automatically during [upsert](https://docs.pinecone.io/reference/api/2025-10/data-plane/upsert_records) and [search](https://docs.pinecone.io/reference/api/2025-10/data-plane/search_records). For guidance and examples, see [Create an index](https://docs.pinecone.io/guides/index-data/create-an-index#integrated-embedding). # noqa: E501 @@ -1876,7 +1894,7 @@ async def __create_index_for_model( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["create_index_for_model_request"] = create_index_for_model_request - return await self.call_with_http_info(**kwargs) + return cast(IndexModel, await self.call_with_http_info(**kwargs)) self.create_index_for_model = _AsyncioEndpoint( settings={ @@ -1919,7 +1937,7 @@ async def __create_index_from_backup_operation( create_index_from_backup_request, x_pinecone_api_version="2025-10", **kwargs, - ): + ) -> CreateIndexFromBackupResponse: """Create an index from a backup # noqa: E501 Create an index from a backup. 
# noqa: E501 @@ -1954,7 +1972,7 @@ async def __create_index_from_backup_operation( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["backup_id"] = backup_id kwargs["create_index_from_backup_request"] = create_index_from_backup_request - return await self.call_with_http_info(**kwargs) + return cast(CreateIndexFromBackupResponse, await self.call_with_http_info(**kwargs)) self.create_index_from_backup_operation = _AsyncioEndpoint( settings={ @@ -2000,7 +2018,9 @@ async def __create_index_from_backup_operation( callable=__create_index_from_backup_operation, ) - async def __delete_backup(self, backup_id, x_pinecone_api_version="2025-10", **kwargs): + async def __delete_backup( + self, backup_id, x_pinecone_api_version="2025-10", **kwargs + ) -> None: """Delete a backup # noqa: E501 Delete a backup. # noqa: E501 @@ -2033,7 +2053,7 @@ async def __delete_backup(self, backup_id, x_pinecone_api_version="2025-10", **k self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["backup_id"] = backup_id - return await self.call_with_http_info(**kwargs) + return cast(None, await self.call_with_http_info(**kwargs)) self.delete_backup = _AsyncioEndpoint( settings={ @@ -2069,7 +2089,7 @@ async def __delete_backup(self, backup_id, x_pinecone_api_version="2025-10", **k async def __delete_collection( self, collection_name, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> None: """Delete a collection # noqa: E501 Delete an existing collection. Serverless indexes do not support collections. 
# noqa: E501 @@ -2102,7 +2122,7 @@ async def __delete_collection( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["collection_name"] = collection_name - return await self.call_with_http_info(**kwargs) + return cast(None, await self.call_with_http_info(**kwargs)) self.delete_collection = _AsyncioEndpoint( settings={ @@ -2136,7 +2156,9 @@ async def __delete_collection( callable=__delete_collection, ) - async def __delete_index(self, index_name, x_pinecone_api_version="2025-10", **kwargs): + async def __delete_index( + self, index_name, x_pinecone_api_version="2025-10", **kwargs + ) -> None: """Delete an index # noqa: E501 Delete an existing index. # noqa: E501 @@ -2169,7 +2191,7 @@ async def __delete_index(self, index_name, x_pinecone_api_version="2025-10", **k self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["index_name"] = index_name - return await self.call_with_http_info(**kwargs) + return cast(None, await self.call_with_http_info(**kwargs)) self.delete_index = _AsyncioEndpoint( settings={ @@ -2203,7 +2225,9 @@ async def __delete_index(self, index_name, x_pinecone_api_version="2025-10", **k callable=__delete_index, ) - async def __describe_backup(self, backup_id, x_pinecone_api_version="2025-10", **kwargs): + async def __describe_backup( + self, backup_id, x_pinecone_api_version="2025-10", **kwargs + ) -> BackupModel: """Describe a backup # noqa: E501 Get a description of a backup. 
# noqa: E501 @@ -2236,7 +2260,7 @@ async def __describe_backup(self, backup_id, x_pinecone_api_version="2025-10", * self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["backup_id"] = backup_id - return await self.call_with_http_info(**kwargs) + return cast(BackupModel, await self.call_with_http_info(**kwargs)) self.describe_backup = _AsyncioEndpoint( settings={ @@ -2272,7 +2296,7 @@ async def __describe_backup(self, backup_id, x_pinecone_api_version="2025-10", * async def __describe_collection( self, collection_name, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> CollectionModel: """Describe a collection # noqa: E501 Get a description of a collection. Serverless indexes do not support collections. # noqa: E501 @@ -2305,7 +2329,7 @@ async def __describe_collection( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["collection_name"] = collection_name - return await self.call_with_http_info(**kwargs) + return cast(CollectionModel, await self.call_with_http_info(**kwargs)) self.describe_collection = _AsyncioEndpoint( settings={ @@ -2339,7 +2363,9 @@ async def __describe_collection( callable=__describe_collection, ) - async def __describe_index(self, index_name, x_pinecone_api_version="2025-10", **kwargs): + async def __describe_index( + self, index_name, x_pinecone_api_version="2025-10", **kwargs + ) -> IndexModel: """Describe an index # noqa: E501 Get a description of an index. 
# noqa: E501 @@ -2372,7 +2398,7 @@ async def __describe_index(self, index_name, x_pinecone_api_version="2025-10", * self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["index_name"] = index_name - return await self.call_with_http_info(**kwargs) + return cast(IndexModel, await self.call_with_http_info(**kwargs)) self.describe_index = _AsyncioEndpoint( settings={ @@ -2406,7 +2432,9 @@ async def __describe_index(self, index_name, x_pinecone_api_version="2025-10", * callable=__describe_index, ) - async def __describe_restore_job(self, job_id, x_pinecone_api_version="2025-10", **kwargs): + async def __describe_restore_job( + self, job_id, x_pinecone_api_version="2025-10", **kwargs + ) -> RestoreJobModel: """Describe a restore job # noqa: E501 Get a description of a restore job. # noqa: E501 @@ -2439,7 +2467,7 @@ async def __describe_restore_job(self, job_id, x_pinecone_api_version="2025-10", self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["job_id"] = job_id - return await self.call_with_http_info(**kwargs) + return cast(RestoreJobModel, await self.call_with_http_info(**kwargs)) self.describe_restore_job = _AsyncioEndpoint( settings={ @@ -2473,7 +2501,9 @@ async def __describe_restore_job(self, job_id, x_pinecone_api_version="2025-10", callable=__describe_restore_job, ) - async def __list_collections(self, x_pinecone_api_version="2025-10", **kwargs): + async def __list_collections( + self, x_pinecone_api_version="2025-10", **kwargs + ) -> CollectionList: """List collections # noqa: E501 List all collections in a project. Serverless indexes do not support collections. 
# noqa: E501 @@ -2504,7 +2534,7 @@ async def __list_collections(self, x_pinecone_api_version="2025-10", **kwargs): """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(CollectionList, await self.call_with_http_info(**kwargs)) self.list_collections = _AsyncioEndpoint( settings={ @@ -2537,7 +2567,7 @@ async def __list_collections(self, x_pinecone_api_version="2025-10", **kwargs): async def __list_index_backups( self, index_name, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> BackupList: """List backups for an index # noqa: E501 List all backups for an index. # noqa: E501 @@ -2572,7 +2602,7 @@ async def __list_index_backups( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["index_name"] = index_name - return await self.call_with_http_info(**kwargs) + return cast(BackupList, await self.call_with_http_info(**kwargs)) self.list_index_backups = _AsyncioEndpoint( settings={ @@ -2618,7 +2648,7 @@ async def __list_index_backups( callable=__list_index_backups, ) - async def __list_indexes(self, x_pinecone_api_version="2025-10", **kwargs): + async def __list_indexes(self, x_pinecone_api_version="2025-10", **kwargs) -> IndexList: """List indexes # noqa: E501 List all indexes in a project. 
# noqa: E501 @@ -2649,7 +2679,7 @@ async def __list_indexes(self, x_pinecone_api_version="2025-10", **kwargs): """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(IndexList, await self.call_with_http_info(**kwargs)) self.list_indexes = _AsyncioEndpoint( settings={ @@ -2680,7 +2710,9 @@ async def __list_indexes(self, x_pinecone_api_version="2025-10", **kwargs): callable=__list_indexes, ) - async def __list_project_backups(self, x_pinecone_api_version="2025-10", **kwargs): + async def __list_project_backups( + self, x_pinecone_api_version="2025-10", **kwargs + ) -> BackupList: """List backups for all indexes in a project # noqa: E501 List all backups for a project. # noqa: E501 @@ -2713,7 +2745,7 @@ async def __list_project_backups(self, x_pinecone_api_version="2025-10", **kwarg """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(BackupList, await self.call_with_http_info(**kwargs)) self.list_project_backups = _AsyncioEndpoint( settings={ @@ -2756,7 +2788,9 @@ async def __list_project_backups(self, x_pinecone_api_version="2025-10", **kwarg callable=__list_project_backups, ) - async def __list_restore_jobs(self, x_pinecone_api_version="2025-10", **kwargs): + async def __list_restore_jobs( + self, x_pinecone_api_version="2025-10", **kwargs + ) -> RestoreJobList: """List restore jobs # noqa: E501 List all restore jobs for a project. 
# noqa: E501 @@ -2789,7 +2823,7 @@ async def __list_restore_jobs(self, x_pinecone_api_version="2025-10", **kwargs): """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(RestoreJobList, await self.call_with_http_info(**kwargs)) self.list_restore_jobs = _AsyncioEndpoint( settings={ diff --git a/pinecone/core/openapi/db_control/model/backup_list.py b/pinecone/core/openapi/db_control/model/backup_list.py index c45302c6c..49633da76 100644 --- a/pinecone/core/openapi/db_control/model/backup_list.py +++ b/pinecone/core/openapi/db_control/model/backup_list.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.backup_model import BackupModel + from pinecone.core.openapi.db_control.model.pagination_response import PaginationResponse + def lazy_import(): from pinecone.core.openapi.db_control.model.backup_model import BackupModel @@ -111,6 +117,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of BackupList. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/backup_model.py b/pinecone/core/openapi/db_control/model/backup_model.py index 96182174d..f41e5f439 100644 --- a/pinecone/core/openapi/db_control/model/backup_model.py +++ b/pinecone/core/openapi/db_control/model/backup_model.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema + from pinecone.core.openapi.db_control.model.index_tags import IndexTags + def lazy_import(): from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema @@ -141,6 +147,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of BackupModel. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data( diff --git a/pinecone/core/openapi/db_control/model/backup_model_schema.py b/pinecone/core/openapi/db_control/model/backup_model_schema.py index 0b9a02d34..157d5ec36 100644 --- a/pinecone/core/openapi/db_control/model/backup_model_schema.py +++ b/pinecone/core/openapi/db_control/model/backup_model_schema.py @@ -26,6 +26,13 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.backup_model_schema_fields import ( + BackupModelSchemaFields, + ) + def lazy_import(): from pinecone.core.openapi.db_control.model.backup_model_schema_fields import ( @@ -109,6 +116,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of BackupModelSchema. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], fields, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/backup_model_schema_fields.py b/pinecone/core/openapi/db_control/model/backup_model_schema_fields.py index 51a95c0d6..f95b7cb0f 100644 --- a/pinecone/core/openapi/db_control/model/backup_model_schema_fields.py +++ b/pinecone/core/openapi/db_control/model/backup_model_schema_fields.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of BackupModelSchemaFields. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/byoc.py b/pinecone/core/openapi/db_control/model/byoc.py index 7d87e24bf..2e45fc821 100644 --- a/pinecone/core/openapi/db_control/model/byoc.py +++ b/pinecone/core/openapi/db_control/model/byoc.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec + def lazy_import(): from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec @@ -107,6 +112,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of BYOC. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], byoc, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/byoc_spec.py b/pinecone/core/openapi/db_control/model/byoc_spec.py index ef30a46db..9eaae678a 100644 --- a/pinecone/core/openapi/db_control/model/byoc_spec.py +++ b/pinecone/core/openapi/db_control/model/byoc_spec.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema + def lazy_import(): from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema @@ -109,6 +114,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ByocSpec. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/collection_list.py b/pinecone/core/openapi/db_control/model/collection_list.py index 5c1166e7d..2b495fea5 100644 --- a/pinecone/core/openapi/db_control/model/collection_list.py +++ b/pinecone/core/openapi/db_control/model/collection_list.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.collection_model import CollectionModel + def lazy_import(): from pinecone.core.openapi.db_control.model.collection_model import CollectionModel @@ -107,6 +112,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CollectionList. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/collection_model.py b/pinecone/core/openapi/db_control/model/collection_model.py index 380be520a..3ebbb8f07 100644 --- a/pinecone/core/openapi/db_control/model/collection_model.py +++ b/pinecone/core/openapi/db_control/model/collection_model.py @@ -111,6 +111,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CollectionModel. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], name, status, environment, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/configure_index_request.py b/pinecone/core/openapi/db_control/model/configure_index_request.py index 7e8d58884..8d2241e09 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request.py @@ -26,6 +26,14 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.configure_index_request_embed import ( + ConfigureIndexRequestEmbed, + ) + from pinecone.core.openapi.db_control.model.index_tags import IndexTags + def lazy_import(): from 
pinecone.core.openapi.db_control.model.configure_index_request_embed import ( @@ -117,6 +125,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ConfigureIndexRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/configure_index_request_embed.py b/pinecone/core/openapi/db_control/model/configure_index_request_embed.py index 3491145a1..1195d80f2 100644 --- a/pinecone/core/openapi/db_control/model/configure_index_request_embed.py +++ b/pinecone/core/openapi/db_control/model/configure_index_request_embed.py @@ -85,9 +85,9 @@ def openapi_types(cls): """ return { "model": (str,), # noqa: E501 - "field_map": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 - "read_parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 - "write_parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "field_map": (Dict[str, Any],), # noqa: E501 + "read_parameters": (Dict[str, Any],), # noqa: E501 + "write_parameters": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -105,6 +105,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ConfigureIndexRequestEmbed. + + This method is overridden to provide proper type inference for mypy. 
+ The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 @@ -142,9 +153,9 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) model (str): The name of the embedding model to use with the index. The index dimension and model dimension must match, and the index similarity metric must be supported by the model. The index embedding model cannot be changed once set. [optional] # noqa: E501 - field_map ({str: (bool, dict, float, int, list, str, none_type)}): Identifies the name of the text field from your document model that will be embedded. [optional] # noqa: E501 - read_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The read parameters for the embedding model. [optional] # noqa: E501 - write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 + field_map (Dict[str, Any]): Identifies the name of the text field from your document model that will be embedded. [optional] # noqa: E501 + read_parameters (Dict[str, Any]): The read parameters for the embedding model. [optional] # noqa: E501 + write_parameters (Dict[str, Any]): The write parameters for the embedding model. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -235,9 +246,9 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) model (str): The name of the embedding model to use with the index. 
The index dimension and model dimension must match, and the index similarity metric must be supported by the model. The index embedding model cannot be changed once set. [optional] # noqa: E501 - field_map ({str: (bool, dict, float, int, list, str, none_type)}): Identifies the name of the text field from your document model that will be embedded. [optional] # noqa: E501 - read_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The read parameters for the embedding model. [optional] # noqa: E501 - write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 + field_map (Dict[str, Any]): Identifies the name of the text field from your document model that will be embedded. [optional] # noqa: E501 + read_parameters (Dict[str, Any]): The read parameters for the embedding model. [optional] # noqa: E501 + write_parameters (Dict[str, Any]): The write parameters for the embedding model. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/db_control/model/create_backup_request.py b/pinecone/core/openapi/db_control/model/create_backup_request.py index b6eeb1da8..cbdde388e 100644 --- a/pinecone/core/openapi/db_control/model/create_backup_request.py +++ b/pinecone/core/openapi/db_control/model/create_backup_request.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateBackupRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/create_collection_request.py b/pinecone/core/openapi/db_control/model/create_collection_request.py index 4d957bfde..164e0a3d2 100644 --- a/pinecone/core/openapi/db_control/model/create_collection_request.py +++ b/pinecone/core/openapi/db_control/model/create_collection_request.py @@ -103,6 +103,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateCollectionRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], name, source, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/create_index_for_model_request.py b/pinecone/core/openapi/db_control/model/create_index_for_model_request.py index 78207021b..42dc820d5 100644 --- a/pinecone/core/openapi/db_control/model/create_index_for_model_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_for_model_request.py @@ -26,6 +26,16 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema + from pinecone.core.openapi.db_control.model.create_index_for_model_request_embed import ( + CreateIndexForModelRequestEmbed, + ) + from pinecone.core.openapi.db_control.model.index_tags import IndexTags + from pinecone.core.openapi.db_control.model.read_capacity import ReadCapacity + def lazy_import(): from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema @@ -131,6 +141,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateIndexForModelRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], name, cloud, region, embed, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py b/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py index 38027b94b..8e5d7b1a7 100644 --- a/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py +++ b/pinecone/core/openapi/db_control/model/create_index_for_model_request_embed.py @@ -85,11 +85,11 @@ def openapi_types(cls): """ return { "model": (str,), # noqa: E501 - "field_map": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "field_map": (Dict[str, Any],), # noqa: E501 "metric": (str,), # noqa: E501 "dimension": (int,), # noqa: E501 - "read_parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 - "write_parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "read_parameters": (Dict[str, Any],), # noqa: E501 + "write_parameters": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -109,6 +109,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateIndexForModelRequestEmbed. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # noqa: E501 @@ -116,7 +127,7 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # Args: model (str): The name of the embedding model to use for the index. - field_map ({str: (bool, dict, float, int, list, str, none_type)}): Identifies the name of the text field from your document model that will be embedded. + field_map (Dict[str, Any]): Identifies the name of the text field from your document model that will be embedded. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -151,8 +162,8 @@ def _from_openapi_data(cls: Type[T], model, field_map, *args, **kwargs) -> T: # _visited_composed_classes = (Animal,) metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If not specified, the metric will be defaulted according to the model. Cannot be updated once set. Possible values: `cosine`, `euclidean`, or `dotproduct`. [optional] # noqa: E501 dimension (int): The dimension of embedding vectors produced for the index. [optional] # noqa: E501 - read_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The read parameters for the embedding model. [optional] # noqa: E501 - write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 + read_parameters (Dict[str, Any]): The read parameters for the embedding model. [optional] # noqa: E501 + write_parameters (Dict[str, Any]): The write parameters for the embedding model. 
[optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -215,7 +226,7 @@ def __init__(self, model, field_map, *args, **kwargs) -> None: # noqa: E501 Args: model (str): The name of the embedding model to use for the index. - field_map ({str: (bool, dict, float, int, list, str, none_type)}): Identifies the name of the text field from your document model that will be embedded. + field_map (Dict[str, Any]): Identifies the name of the text field from your document model that will be embedded. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -250,8 +261,8 @@ def __init__(self, model, field_map, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If not specified, the metric will be defaulted according to the model. Cannot be updated once set. Possible values: `cosine`, `euclidean`, or `dotproduct`. [optional] # noqa: E501 dimension (int): The dimension of embedding vectors produced for the index. [optional] # noqa: E501 - read_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The read parameters for the embedding model. [optional] # noqa: E501 - write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 + read_parameters (Dict[str, Any]): The read parameters for the embedding model. [optional] # noqa: E501 + write_parameters (Dict[str, Any]): The write parameters for the embedding model. 
[optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py b/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py index 083749941..9139cffe1 100644 --- a/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_from_backup_request.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.index_tags import IndexTags + def lazy_import(): from pinecone.core.openapi.db_control.model.index_tags import IndexTags @@ -113,6 +118,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateIndexFromBackupRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py b/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py index 88df35705..d90ee4982 100644 --- a/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py +++ b/pinecone/core/openapi/db_control/model/create_index_from_backup_response.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateIndexFromBackupResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], restore_job_id, index_id, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/create_index_request.py b/pinecone/core/openapi/db_control/model/create_index_request.py index 2106ab86b..dcbfc2e51 100644 --- a/pinecone/core/openapi/db_control/model/create_index_request.py +++ b/pinecone/core/openapi/db_control/model/create_index_request.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.index_spec import IndexSpec + from pinecone.core.openapi.db_control.model.index_tags import IndexTags + def lazy_import(): from pinecone.core.openapi.db_control.model.index_spec import IndexSpec @@ -124,6 +130,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateIndexRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], name, spec, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/error_response.py b/pinecone/core/openapi/db_control/model/error_response.py index 56222a1f6..781a9f48f 100644 --- a/pinecone/core/openapi/db_control/model/error_response.py +++ b/pinecone/core/openapi/db_control/model/error_response.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.error_response_error import ErrorResponseError + def lazy_import(): from pinecone.core.openapi.db_control.model.error_response_error import ErrorResponseError @@ -109,6 +114,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ErrorResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/error_response_error.py b/pinecone/core/openapi/db_control/model/error_response_error.py index ee7b00355..16fc54343 100644 --- a/pinecone/core/openapi/db_control/model/error_response_error.py +++ b/pinecone/core/openapi/db_control/model/error_response_error.py @@ -86,7 +86,7 @@ def openapi_types(cls): return { "code": (str,), # noqa: E501 "message": (str,), # noqa: E501 - "details": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "details": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -103,6 +103,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ErrorResponseError. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # noqa: E501 @@ -143,7 +154,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. 
[optional] # noqa: E501 + details (Dict[str, Any]): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -239,7 +250,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 + details (Dict[str, Any]): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/db_control/model/index_list.py b/pinecone/core/openapi/db_control/model/index_list.py index 046492c1f..ff4ea930c 100644 --- a/pinecone/core/openapi/db_control/model/index_list.py +++ b/pinecone/core/openapi/db_control/model/index_list.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.index_model import IndexModel + def lazy_import(): from pinecone.core.openapi.db_control.model.index_model import IndexModel @@ -107,6 +112,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of IndexList. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/index_model.py b/pinecone/core/openapi/db_control/model/index_model.py index b4af577fa..4afd6f848 100644 --- a/pinecone/core/openapi/db_control/model/index_model.py +++ b/pinecone/core/openapi/db_control/model/index_model.py @@ -26,6 +26,13 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.index_model_status import IndexModelStatus + from pinecone.core.openapi.db_control.model.index_tags import IndexTags + from pinecone.core.openapi.db_control.model.model_index_embed import ModelIndexEmbed + def lazy_import(): from pinecone.core.openapi.db_control.model.index_model_status import IndexModelStatus @@ -102,7 +109,7 @@ def openapi_types(cls): "name": (str,), # noqa: E501 "metric": (str,), # noqa: E501 "host": (str,), # noqa: E501 - "spec": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "spec": (Dict[str, Any],), # noqa: E501 "status": (IndexModelStatus,), # noqa: E501 "vector_type": (str,), # noqa: E501 "dimension": (int,), # noqa: E501 @@ -134,6 +141,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of IndexModel. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, **kwargs) -> T: # noqa: E501 @@ -143,7 +161,7 @@ def _from_openapi_data(cls: Type[T], name, metric, host, spec, status, *args, ** name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If the 'vector_type' is 'sparse', the metric must be 'dotproduct'. If the `vector_type` is `dense`, the metric defaults to 'cosine'. Possible values: `cosine`, `euclidean`, or `dotproduct`. host (str): The URL address where the index is hosted. - spec ({str: (bool, dict, float, int, list, str, none_type)}): The spec object defines how the index should be deployed. + spec (Dict[str, Any]): The spec object defines how the index should be deployed. status (IndexModelStatus): Keyword Args: @@ -252,7 +270,7 @@ def __init__(self, name, metric, host, spec, status, *args, **kwargs) -> None: name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If the 'vector_type' is 'sparse', the metric must be 'dotproduct'. If the `vector_type` is `dense`, the metric defaults to 'cosine'. Possible values: `cosine`, `euclidean`, or `dotproduct`. host (str): The URL address where the index is hosted. - spec ({str: (bool, dict, float, int, list, str, none_type)}): The spec object defines how the index should be deployed. 
+ spec (Dict[str, Any]): The spec object defines how the index should be deployed. status (IndexModelStatus): Keyword Args: diff --git a/pinecone/core/openapi/db_control/model/index_model_status.py b/pinecone/core/openapi/db_control/model/index_model_status.py index d020f8cbf..3d4b20fec 100644 --- a/pinecone/core/openapi/db_control/model/index_model_status.py +++ b/pinecone/core/openapi/db_control/model/index_model_status.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of IndexModelStatus. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], ready, state, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/index_spec.py b/pinecone/core/openapi/db_control/model/index_spec.py index ac7cf2a4f..44de6215d 100644 --- a/pinecone/core/openapi/db_control/model/index_spec.py +++ b/pinecone/core/openapi/db_control/model/index_spec.py @@ -26,6 +26,16 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.byoc import BYOC + from pinecone.core.openapi.db_control.model.byoc_spec import ByocSpec + from pinecone.core.openapi.db_control.model.pod_based import PodBased + from pinecone.core.openapi.db_control.model.pod_spec import PodSpec + from pinecone.core.openapi.db_control.model.serverless import Serverless + from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec + 
def lazy_import(): from pinecone.core.openapi.db_control.model.byoc import BYOC @@ -320,7 +330,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) @cached_property - def _composed_schemas(): # type: ignore + def _composed_schemas(): # we need this here to make our import statements work # we must store _composed_schemas in here so the code is only run # when we invoke this method. If we kept this at the class diff --git a/pinecone/core/openapi/db_control/model/index_tags.py b/pinecone/core/openapi/db_control/model/index_tags.py index b4df234ba..a87b1ff03 100644 --- a/pinecone/core/openapi/db_control/model/index_tags.py +++ b/pinecone/core/openapi/db_control/model/index_tags.py @@ -95,6 +95,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of IndexTags. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/model_index_embed.py b/pinecone/core/openapi/db_control/model/model_index_embed.py index c94396381..cf9ab54e1 100644 --- a/pinecone/core/openapi/db_control/model/model_index_embed.py +++ b/pinecone/core/openapi/db_control/model/model_index_embed.py @@ -90,9 +90,9 @@ def openapi_types(cls): "metric": (str,), # noqa: E501 "dimension": (int,), # noqa: E501 "vector_type": (str,), # noqa: E501 - "field_map": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 - "read_parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 - "write_parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "field_map": (Dict[str, Any],), # noqa: E501 + "read_parameters": (Dict[str, Any],), # noqa: E501 + "write_parameters": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -113,6 +113,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ModelIndexEmbed. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 @@ -155,9 +166,9 @@ def _from_openapi_data(cls: Type[T], model, *args, **kwargs) -> T: # noqa: E501 metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If not specified, the metric will be defaulted according to the model. Cannot be updated once set. Possible values: `cosine`, `euclidean`, or `dotproduct`. [optional] # noqa: E501 dimension (int): The dimensions of the vectors to be inserted in the index. [optional] # noqa: E501 vector_type (str): The index vector type. You can use 'dense' or 'sparse'. If 'dense', the vector dimension must be specified. If 'sparse', the vector dimension should not be specified. [optional] if omitted the server will use the default value of "dense". # noqa: E501 - field_map ({str: (bool, dict, float, int, list, str, none_type)}): Identifies the name of the text field from your document model that is embedded. [optional] # noqa: E501 - read_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The read parameters for the embedding model. [optional] # noqa: E501 - write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 + field_map (Dict[str, Any]): Identifies the name of the text field from your document model that is embedded. [optional] # noqa: E501 + read_parameters (Dict[str, Any]): The read parameters for the embedding model. [optional] # noqa: E501 + write_parameters (Dict[str, Any]): The write parameters for the embedding model. 
[optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -254,9 +265,9 @@ def __init__(self, model, *args, **kwargs) -> None: # noqa: E501 metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'. If not specified, the metric will be defaulted according to the model. Cannot be updated once set. Possible values: `cosine`, `euclidean`, or `dotproduct`. [optional] # noqa: E501 dimension (int): The dimensions of the vectors to be inserted in the index. [optional] # noqa: E501 vector_type (str): The index vector type. You can use 'dense' or 'sparse'. If 'dense', the vector dimension must be specified. If 'sparse', the vector dimension should not be specified. [optional] if omitted the server will use the default value of "dense". # noqa: E501 - field_map ({str: (bool, dict, float, int, list, str, none_type)}): Identifies the name of the text field from your document model that is embedded. [optional] # noqa: E501 - read_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The read parameters for the embedding model. [optional] # noqa: E501 - write_parameters ({str: (bool, dict, float, int, list, str, none_type)}): The write parameters for the embedding model. [optional] # noqa: E501 + field_map (Dict[str, Any]): Identifies the name of the text field from your document model that is embedded. [optional] # noqa: E501 + read_parameters (Dict[str, Any]): The read parameters for the embedding model. [optional] # noqa: E501 + write_parameters (Dict[str, Any]): The write parameters for the embedding model. 
[optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/db_control/model/pagination_response.py b/pinecone/core/openapi/db_control/model/pagination_response.py index b357e5224..945b6e2e7 100644 --- a/pinecone/core/openapi/db_control/model/pagination_response.py +++ b/pinecone/core/openapi/db_control/model/pagination_response.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of PaginationResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], next, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/pod_based.py b/pinecone/core/openapi/db_control/model/pod_based.py index dddba5b11..70a67564f 100644 --- a/pinecone/core/openapi/db_control/model/pod_based.py +++ b/pinecone/core/openapi/db_control/model/pod_based.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.pod_spec import PodSpec + def lazy_import(): from pinecone.core.openapi.db_control.model.pod_spec import PodSpec @@ -107,6 +112,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of PodBased. + + This method is overridden to provide proper type inference for mypy. 
+ The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], pod, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/pod_spec.py b/pinecone/core/openapi/db_control/model/pod_spec.py index 1714212c5..dcb7d6544 100644 --- a/pinecone/core/openapi/db_control/model/pod_spec.py +++ b/pinecone/core/openapi/db_control/model/pod_spec.py @@ -26,6 +26,13 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.pod_spec_metadata_config import ( + PodSpecMetadataConfig, + ) + def lazy_import(): from pinecone.core.openapi.db_control.model.pod_spec_metadata_config import ( @@ -125,6 +132,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of PodSpec. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], environment, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py b/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py index 5508d9316..3fdf1753b 100644 --- a/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py +++ b/pinecone/core/openapi/db_control/model/pod_spec_metadata_config.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of PodSpecMetadataConfig. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/read_capacity.py b/pinecone/core/openapi/db_control/model/read_capacity.py index 4b773a4f8..98e972ca2 100644 --- a/pinecone/core/openapi/db_control/model/read_capacity.py +++ b/pinecone/core/openapi/db_control/model/read_capacity.py @@ -26,6 +26,19 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.read_capacity_dedicated_config import ( + ReadCapacityDedicatedConfig, + ) + from pinecone.core.openapi.db_control.model.read_capacity_dedicated_spec import ( + ReadCapacityDedicatedSpec, + ) + from pinecone.core.openapi.db_control.model.read_capacity_on_demand_spec import ( + ReadCapacityOnDemandSpec, + ) + def lazy_import(): from pinecone.core.openapi.db_control.model.read_capacity_dedicated_config import ( @@ -325,7 +338,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) @cached_property - def _composed_schemas(): # type: ignore + def _composed_schemas(): # we need this here to make our import statements work # we must store _composed_schemas in here so the code is only run # when we invoke this method. 
If we kept this at the class diff --git a/pinecone/core/openapi/db_control/model/read_capacity_dedicated_config.py b/pinecone/core/openapi/db_control/model/read_capacity_dedicated_config.py index e95c2fdf1..48f9648c0 100644 --- a/pinecone/core/openapi/db_control/model/read_capacity_dedicated_config.py +++ b/pinecone/core/openapi/db_control/model/read_capacity_dedicated_config.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.scaling_config_manual import ScalingConfigManual + def lazy_import(): from pinecone.core.openapi.db_control.model.scaling_config_manual import ScalingConfigManual @@ -111,6 +116,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ReadCapacityDedicatedConfig. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], node_type, scaling, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/read_capacity_dedicated_spec.py b/pinecone/core/openapi/db_control/model/read_capacity_dedicated_spec.py index 54bfafc61..6a77424ce 100644 --- a/pinecone/core/openapi/db_control/model/read_capacity_dedicated_spec.py +++ b/pinecone/core/openapi/db_control/model/read_capacity_dedicated_spec.py @@ -26,6 +26,13 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.read_capacity_dedicated_config import ( + ReadCapacityDedicatedConfig, + ) + def lazy_import(): from pinecone.core.openapi.db_control.model.read_capacity_dedicated_config import ( @@ -111,6 +118,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ReadCapacityDedicatedSpec. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], mode, dedicated, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/read_capacity_dedicated_spec_response.py b/pinecone/core/openapi/db_control/model/read_capacity_dedicated_spec_response.py index c1eb3b18a..299450d04 100644 --- a/pinecone/core/openapi/db_control/model/read_capacity_dedicated_spec_response.py +++ b/pinecone/core/openapi/db_control/model/read_capacity_dedicated_spec_response.py @@ -26,6 +26,14 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.read_capacity_dedicated_config import ( + ReadCapacityDedicatedConfig, + ) + from pinecone.core.openapi.db_control.model.read_capacity_status import ReadCapacityStatus + def lazy_import(): from pinecone.core.openapi.db_control.model.read_capacity_dedicated_config import ( @@ -108,6 +116,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ReadCapacityDedicatedSpecResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], mode, dedicated, status, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/read_capacity_on_demand_spec.py b/pinecone/core/openapi/db_control/model/read_capacity_on_demand_spec.py index 9446c424f..4bfd4f92f 100644 --- a/pinecone/core/openapi/db_control/model/read_capacity_on_demand_spec.py +++ b/pinecone/core/openapi/db_control/model/read_capacity_on_demand_spec.py @@ -93,6 +93,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ReadCapacityOnDemandSpec. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], mode, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/read_capacity_on_demand_spec_response.py b/pinecone/core/openapi/db_control/model/read_capacity_on_demand_spec_response.py index e01b47d51..6b49936f7 100644 --- a/pinecone/core/openapi/db_control/model/read_capacity_on_demand_spec_response.py +++ b/pinecone/core/openapi/db_control/model/read_capacity_on_demand_spec_response.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.read_capacity_status import ReadCapacityStatus + def lazy_import(): from pinecone.core.openapi.db_control.model.read_capacity_status import ReadCapacityStatus @@ -102,6 +107,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ReadCapacityOnDemandSpecResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], mode, status, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/read_capacity_response.py b/pinecone/core/openapi/db_control/model/read_capacity_response.py index 6d5047e17..d1dc889a7 100644 --- a/pinecone/core/openapi/db_control/model/read_capacity_response.py +++ b/pinecone/core/openapi/db_control/model/read_capacity_response.py @@ -26,6 +26,20 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.read_capacity_dedicated_config import ( + ReadCapacityDedicatedConfig, + ) + from pinecone.core.openapi.db_control.model.read_capacity_dedicated_spec_response import ( + ReadCapacityDedicatedSpecResponse, + ) + from pinecone.core.openapi.db_control.model.read_capacity_on_demand_spec_response import ( + ReadCapacityOnDemandSpecResponse, + ) + from pinecone.core.openapi.db_control.model.read_capacity_status import ReadCapacityStatus + def lazy_import(): from pinecone.core.openapi.db_control.model.read_capacity_dedicated_config import ( @@ -331,7 +345,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) @cached_property - def _composed_schemas(): # type: ignore + def _composed_schemas(): # we need this here to make our import statements work # we must store _composed_schemas in here so the code is only run # when we invoke this method. 
If we kept this at the class diff --git a/pinecone/core/openapi/db_control/model/read_capacity_status.py b/pinecone/core/openapi/db_control/model/read_capacity_status.py index 107e40317..735a64ad5 100644 --- a/pinecone/core/openapi/db_control/model/read_capacity_status.py +++ b/pinecone/core/openapi/db_control/model/read_capacity_status.py @@ -105,6 +105,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ReadCapacityStatus. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], state, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/restore_job_list.py b/pinecone/core/openapi/db_control/model/restore_job_list.py index a01d8b6b8..e1a4d21a5 100644 --- a/pinecone/core/openapi/db_control/model/restore_job_list.py +++ b/pinecone/core/openapi/db_control/model/restore_job_list.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.pagination_response import PaginationResponse + from pinecone.core.openapi.db_control.model.restore_job_model import RestoreJobModel + def lazy_import(): from pinecone.core.openapi.db_control.model.pagination_response import PaginationResponse @@ -111,6 +117,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of 
RestoreJobList. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], data, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/restore_job_model.py b/pinecone/core/openapi/db_control/model/restore_job_model.py index 5f68f3c5a..d278d0b6c 100644 --- a/pinecone/core/openapi/db_control/model/restore_job_model.py +++ b/pinecone/core/openapi/db_control/model/restore_job_model.py @@ -115,6 +115,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of RestoreJobModel. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data( diff --git a/pinecone/core/openapi/db_control/model/scaling_config_manual.py b/pinecone/core/openapi/db_control/model/scaling_config_manual.py index 0639533f7..75d02ea42 100644 --- a/pinecone/core/openapi/db_control/model/scaling_config_manual.py +++ b/pinecone/core/openapi/db_control/model/scaling_config_manual.py @@ -104,6 +104,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ScalingConfigManual. 
+ + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], replicas, shards, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/serverless.py b/pinecone/core/openapi/db_control/model/serverless.py index d36a79a52..283f2b74e 100644 --- a/pinecone/core/openapi/db_control/model/serverless.py +++ b/pinecone/core/openapi/db_control/model/serverless.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec + def lazy_import(): from pinecone.core.openapi.db_control.model.serverless_spec import ServerlessSpec @@ -107,6 +112,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of Serverless. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], serverless, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/serverless_spec.py b/pinecone/core/openapi/db_control/model/serverless_spec.py index 0f1800f1e..239ac69ad 100644 --- a/pinecone/core/openapi/db_control/model/serverless_spec.py +++ b/pinecone/core/openapi/db_control/model/serverless_spec.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema + from pinecone.core.openapi.db_control.model.read_capacity import ReadCapacity + def lazy_import(): from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema @@ -117,6 +123,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ServerlessSpec. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], cloud, region, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_control/model/serverless_spec_response.py b/pinecone/core/openapi/db_control/model/serverless_spec_response.py index c542323e9..cbd4f69c3 100644 --- a/pinecone/core/openapi/db_control/model/serverless_spec_response.py +++ b/pinecone/core/openapi/db_control/model/serverless_spec_response.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema + from pinecone.core.openapi.db_control.model.read_capacity_response import ReadCapacityResponse + def lazy_import(): from pinecone.core.openapi.db_control.model.backup_model_schema import BackupModelSchema @@ -117,6 +123,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ServerlessSpecResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], cloud, region, read_capacity, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/api/bulk_operations_api.py b/pinecone/core/openapi/db_data/api/bulk_operations_api.py index 237b9f3b2..b1446c703 100644 --- a/pinecone/core/openapi/db_data/api/bulk_operations_api.py +++ b/pinecone/core/openapi/db_data/api/bulk_operations_api.py @@ -9,6 +9,11 @@ Contact: support@pinecone.io """ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, cast +from multiprocessing.pool import ApplyResult + from pinecone.openapi_support import ApiClient, AsyncioApiClient from pinecone.openapi_support.endpoint_utils import ( ExtraOpenApiKwargsTypedDict, @@ -43,7 +48,7 @@ def __init__(self, api_client=None) -> None: def __cancel_bulk_import( self, id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> Dict[str, Any] | ApplyResult[Dict[str, Any]]: """Cancel an import # noqa: E501 Cancel an import operation if it is not yet finished. It has no effect if the operation is already finished. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 @@ -76,18 +81,20 @@ def __cancel_bulk_import( async_req (bool): execute request asynchronously Returns: - {str: (bool, dict, float, int, list, str, none_type)} + Dict[str, Any] If the method is called asynchronously, returns the request thread. 
""" kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["id"] = id - return self.call_with_http_info(**kwargs) + return cast( + Dict[str, Any] | ApplyResult[Dict[str, Any]], self.call_with_http_info(**kwargs) + ) self.cancel_bulk_import = _Endpoint( settings={ - "response_type": ({str: (bool, dict, float, int, list, str, none_type)},), + "response_type": (Dict[str, Any],), "auth": ["ApiKeyAuth"], "endpoint_path": "/bulk/imports/{id}", "operation_id": "cancel_bulk_import", @@ -116,7 +123,7 @@ def __cancel_bulk_import( def __describe_bulk_import( self, id, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> ImportModel | ApplyResult[ImportModel]: """Describe an import # noqa: E501 Return details of a specific import operation. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 @@ -156,7 +163,7 @@ def __describe_bulk_import( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["id"] = id - return self.call_with_http_info(**kwargs) + return cast(ImportModel | ApplyResult[ImportModel], self.call_with_http_info(**kwargs)) self.describe_bulk_import = _Endpoint( settings={ @@ -189,7 +196,7 @@ def __describe_bulk_import( def __list_bulk_imports( self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> ListImportsResponse | ApplyResult[ListImportsResponse]: """List imports # noqa: E501 List all recent and ongoing import operations. By default, `list_imports` returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. 
For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 @@ -229,7 +236,10 @@ def __list_bulk_imports( """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast( + ListImportsResponse | ApplyResult[ListImportsResponse], + self.call_with_http_info(**kwargs), + ) self.list_bulk_imports = _Endpoint( settings={ @@ -277,7 +287,7 @@ def __start_bulk_import( start_import_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> StartImportResponse | ApplyResult[StartImportResponse]: """Start import # noqa: E501 Start an asynchronous import of vectors from object storage into an index. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 @@ -317,7 +327,10 @@ def __start_bulk_import( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["start_import_request"] = start_import_request - return self.call_with_http_info(**kwargs) + return cast( + StartImportResponse | ApplyResult[StartImportResponse], + self.call_with_http_info(**kwargs), + ) self.start_bulk_import = _Endpoint( settings={ @@ -366,7 +379,9 @@ def __init__(self, api_client=None) -> None: api_client = AsyncioApiClient() self.api_client = api_client - async def __cancel_bulk_import(self, id, x_pinecone_api_version="2025-10", **kwargs): + async def __cancel_bulk_import( + self, id, x_pinecone_api_version="2025-10", **kwargs + ) -> Dict[str, Any]: """Cancel an import # noqa: E501 Cancel an import operation if it is not yet finished. It has no effect if the operation is already finished. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). 
# noqa: E501 @@ -394,16 +409,16 @@ async def __cancel_bulk_import(self, id, x_pinecone_api_version="2025-10", **kwa Default is True. Returns: - {str: (bool, dict, float, int, list, str, none_type)} + Dict[str, Any] """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["id"] = id - return await self.call_with_http_info(**kwargs) + return cast(Dict[str, Any], await self.call_with_http_info(**kwargs)) self.cancel_bulk_import = _AsyncioEndpoint( settings={ - "response_type": ({str: (bool, dict, float, int, list, str, none_type)},), + "response_type": (Dict[str, Any],), "auth": ["ApiKeyAuth"], "endpoint_path": "/bulk/imports/{id}", "operation_id": "cancel_bulk_import", @@ -430,7 +445,9 @@ async def __cancel_bulk_import(self, id, x_pinecone_api_version="2025-10", **kwa callable=__cancel_bulk_import, ) - async def __describe_bulk_import(self, id, x_pinecone_api_version="2025-10", **kwargs): + async def __describe_bulk_import( + self, id, x_pinecone_api_version="2025-10", **kwargs + ) -> ImportModel: """Describe an import # noqa: E501 Return details of a specific import operation. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). 
# noqa: E501 @@ -463,7 +480,7 @@ async def __describe_bulk_import(self, id, x_pinecone_api_version="2025-10", **k self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["id"] = id - return await self.call_with_http_info(**kwargs) + return cast(ImportModel, await self.call_with_http_info(**kwargs)) self.describe_bulk_import = _AsyncioEndpoint( settings={ @@ -494,7 +511,9 @@ async def __describe_bulk_import(self, id, x_pinecone_api_version="2025-10", **k callable=__describe_bulk_import, ) - async def __list_bulk_imports(self, x_pinecone_api_version="2025-10", **kwargs): + async def __list_bulk_imports( + self, x_pinecone_api_version="2025-10", **kwargs + ) -> ListImportsResponse: """List imports # noqa: E501 List all recent and ongoing import operations. By default, `list_imports` returns up to 100 imports per page. If the `limit` parameter is set, `list` returns up to that number of imports instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of imports. When the response does not include a `pagination_token`, there are no more imports to return. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). 
# noqa: E501 @@ -527,7 +546,7 @@ async def __list_bulk_imports(self, x_pinecone_api_version="2025-10", **kwargs): """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(ListImportsResponse, await self.call_with_http_info(**kwargs)) self.list_bulk_imports = _AsyncioEndpoint( settings={ @@ -572,7 +591,7 @@ async def __list_bulk_imports(self, x_pinecone_api_version="2025-10", **kwargs): async def __start_bulk_import( self, start_import_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> StartImportResponse: """Start import # noqa: E501 Start an asynchronous import of vectors from object storage into an index. For guidance and examples, see [Import data](https://docs.pinecone.io/guides/index-data/import-data). # noqa: E501 @@ -605,7 +624,7 @@ async def __start_bulk_import( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["start_import_request"] = start_import_request - return await self.call_with_http_info(**kwargs) + return cast(StartImportResponse, await self.call_with_http_info(**kwargs)) self.start_bulk_import = _AsyncioEndpoint( settings={ diff --git a/pinecone/core/openapi/db_data/api/namespace_operations_api.py b/pinecone/core/openapi/db_data/api/namespace_operations_api.py index 6111d4c41..733bfa7c7 100644 --- a/pinecone/core/openapi/db_data/api/namespace_operations_api.py +++ b/pinecone/core/openapi/db_data/api/namespace_operations_api.py @@ -9,6 +9,11 @@ Contact: support@pinecone.io """ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, cast +from multiprocessing.pool import ApplyResult + from pinecone.openapi_support import ApiClient, AsyncioApiClient from pinecone.openapi_support.endpoint_utils import ( ExtraOpenApiKwargsTypedDict, @@ -45,7 +50,7 @@ def __create_namespace( create_namespace_request, x_pinecone_api_version="2025-10", **kwargs: 
ExtraOpenApiKwargsTypedDict, - ): + ) -> NamespaceDescription | ApplyResult[NamespaceDescription]: """Create a namespace # noqa: E501 Create a namespace in a serverless index. For guidance and examples, see [Manage namespaces](https://docs.pinecone.io/guides/manage-data/manage-namespaces). **Note:** This operation is not supported for pod-based indexes. # noqa: E501 @@ -85,7 +90,10 @@ def __create_namespace( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["create_namespace_request"] = create_namespace_request - return self.call_with_http_info(**kwargs) + return cast( + NamespaceDescription | ApplyResult[NamespaceDescription], + self.call_with_http_info(**kwargs), + ) self.create_namespace = _Endpoint( settings={ @@ -124,7 +132,7 @@ def __create_namespace( def __delete_namespace( self, namespace, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> Dict[str, Any] | ApplyResult[Dict[str, Any]]: """Delete a namespace # noqa: E501 Delete a namespace from a serverless index. Deleting a namespace is irreversible; all data in the namespace is permanently deleted. For guidance and examples, see [Manage namespaces](https://docs.pinecone.io/guides/manage-data/manage-namespaces). **Note:** This operation is not supported for pod-based indexes. # noqa: E501 @@ -157,18 +165,20 @@ def __delete_namespace( async_req (bool): execute request asynchronously Returns: - {str: (bool, dict, float, int, list, str, none_type)} + Dict[str, Any] If the method is called asynchronously, returns the request thread. 
""" kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["namespace"] = namespace - return self.call_with_http_info(**kwargs) + return cast( + Dict[str, Any] | ApplyResult[Dict[str, Any]], self.call_with_http_info(**kwargs) + ) self.delete_namespace = _Endpoint( settings={ - "response_type": ({str: (bool, dict, float, int, list, str, none_type)},), + "response_type": (Dict[str, Any],), "auth": ["ApiKeyAuth"], "endpoint_path": "/namespaces/{namespace}", "operation_id": "delete_namespace", @@ -200,7 +210,7 @@ def __delete_namespace( def __describe_namespace( self, namespace, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> NamespaceDescription | ApplyResult[NamespaceDescription]: """Describe a namespace # noqa: E501 Describe a namespace in a serverless index, including the total number of vectors in the namespace. For guidance and examples, see [Manage namespaces](https://docs.pinecone.io/guides/manage-data/manage-namespaces). **Note:** This operation is not supported for pod-based indexes. # noqa: E501 @@ -240,7 +250,10 @@ def __describe_namespace( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["namespace"] = namespace - return self.call_with_http_info(**kwargs) + return cast( + NamespaceDescription | ApplyResult[NamespaceDescription], + self.call_with_http_info(**kwargs), + ) self.describe_namespace = _Endpoint( settings={ @@ -276,7 +289,7 @@ def __describe_namespace( def __list_namespaces_operation( self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> ListNamespacesResponse | ApplyResult[ListNamespacesResponse]: """List namespaces # noqa: E501 List all namespaces in a serverless index. Up to 100 namespaces are returned at a time by default, in sorted order (bitwise “C” collation). If the `limit` parameter is set, up to that number of namespaces are returned instead. 
Whenever there are additional namespaces to return, the response also includes a `pagination_token` that you can use to get the next batch of namespaces. When the response does not include a `pagination_token`, there are no more namespaces to return. For guidance and examples, see [Manage namespaces](https://docs.pinecone.io/guides/manage-data/manage-namespaces). **Note:** This operation is not supported for pod-based indexes. # noqa: E501 @@ -317,7 +330,10 @@ def __list_namespaces_operation( """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast( + ListNamespacesResponse | ApplyResult[ListNamespacesResponse], + self.call_with_http_info(**kwargs), + ) self.list_namespaces_operation = _Endpoint( settings={ @@ -377,7 +393,7 @@ def __init__(self, api_client=None) -> None: async def __create_namespace( self, create_namespace_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> NamespaceDescription: """Create a namespace # noqa: E501 Create a namespace in a serverless index. For guidance and examples, see [Manage namespaces](https://docs.pinecone.io/guides/manage-data/manage-namespaces). **Note:** This operation is not supported for pod-based indexes. 
# noqa: E501 @@ -410,7 +426,7 @@ async def __create_namespace( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["create_namespace_request"] = create_namespace_request - return await self.call_with_http_info(**kwargs) + return cast(NamespaceDescription, await self.call_with_http_info(**kwargs)) self.create_namespace = _AsyncioEndpoint( settings={ @@ -447,7 +463,9 @@ async def __create_namespace( callable=__create_namespace, ) - async def __delete_namespace(self, namespace, x_pinecone_api_version="2025-10", **kwargs): + async def __delete_namespace( + self, namespace, x_pinecone_api_version="2025-10", **kwargs + ) -> Dict[str, Any]: """Delete a namespace # noqa: E501 Delete a namespace from a serverless index. Deleting a namespace is irreversible; all data in the namespace is permanently deleted. For guidance and examples, see [Manage namespaces](https://docs.pinecone.io/guides/manage-data/manage-namespaces). **Note:** This operation is not supported for pod-based indexes. # noqa: E501 @@ -475,16 +493,16 @@ async def __delete_namespace(self, namespace, x_pinecone_api_version="2025-10", Default is True. 
Returns: - {str: (bool, dict, float, int, list, str, none_type)} + Dict[str, Any] """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["namespace"] = namespace - return await self.call_with_http_info(**kwargs) + return cast(Dict[str, Any], await self.call_with_http_info(**kwargs)) self.delete_namespace = _AsyncioEndpoint( settings={ - "response_type": ({str: (bool, dict, float, int, list, str, none_type)},), + "response_type": (Dict[str, Any],), "auth": ["ApiKeyAuth"], "endpoint_path": "/namespaces/{namespace}", "operation_id": "delete_namespace", @@ -514,7 +532,9 @@ async def __delete_namespace(self, namespace, x_pinecone_api_version="2025-10", callable=__delete_namespace, ) - async def __describe_namespace(self, namespace, x_pinecone_api_version="2025-10", **kwargs): + async def __describe_namespace( + self, namespace, x_pinecone_api_version="2025-10", **kwargs + ) -> NamespaceDescription: """Describe a namespace # noqa: E501 Describe a namespace in a serverless index, including the total number of vectors in the namespace. For guidance and examples, see [Manage namespaces](https://docs.pinecone.io/guides/manage-data/manage-namespaces). **Note:** This operation is not supported for pod-based indexes. 
# noqa: E501 @@ -547,7 +567,7 @@ async def __describe_namespace(self, namespace, x_pinecone_api_version="2025-10" self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["namespace"] = namespace - return await self.call_with_http_info(**kwargs) + return cast(NamespaceDescription, await self.call_with_http_info(**kwargs)) self.describe_namespace = _AsyncioEndpoint( settings={ @@ -581,7 +601,9 @@ async def __describe_namespace(self, namespace, x_pinecone_api_version="2025-10" callable=__describe_namespace, ) - async def __list_namespaces_operation(self, x_pinecone_api_version="2025-10", **kwargs): + async def __list_namespaces_operation( + self, x_pinecone_api_version="2025-10", **kwargs + ) -> ListNamespacesResponse: """List namespaces # noqa: E501 List all namespaces in a serverless index. Up to 100 namespaces are returned at a time by default, in sorted order (bitwise “C” collation). If the `limit` parameter is set, up to that number of namespaces are returned instead. Whenever there are additional namespaces to return, the response also includes a `pagination_token` that you can use to get the next batch of namespaces. When the response does not include a `pagination_token`, there are no more namespaces to return. For guidance and examples, see [Manage namespaces](https://docs.pinecone.io/guides/manage-data/manage-namespaces). **Note:** This operation is not supported for pod-based indexes. 
# noqa: E501 @@ -615,7 +637,7 @@ async def __list_namespaces_operation(self, x_pinecone_api_version="2025-10", ** """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(ListNamespacesResponse, await self.call_with_http_info(**kwargs)) self.list_namespaces_operation = _AsyncioEndpoint( settings={ diff --git a/pinecone/core/openapi/db_data/api/vector_operations_api.py b/pinecone/core/openapi/db_data/api/vector_operations_api.py index d6f1b7652..a317ca445 100644 --- a/pinecone/core/openapi/db_data/api/vector_operations_api.py +++ b/pinecone/core/openapi/db_data/api/vector_operations_api.py @@ -9,6 +9,11 @@ Contact: support@pinecone.io """ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, cast +from multiprocessing.pool import ApplyResult + from pinecone.openapi_support import ApiClient, AsyncioApiClient from pinecone.openapi_support.endpoint_utils import ( ExtraOpenApiKwargsTypedDict, @@ -60,7 +65,7 @@ def __delete_vectors( delete_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> Dict[str, Any] | ApplyResult[Dict[str, Any]]: """Delete vectors # noqa: E501 Delete vectors by id from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/manage-data/delete-data). # noqa: E501 @@ -93,18 +98,20 @@ def __delete_vectors( async_req (bool): execute request asynchronously Returns: - {str: (bool, dict, float, int, list, str, none_type)} + Dict[str, Any] If the method is called asynchronously, returns the request thread. 
""" kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["delete_request"] = delete_request - return self.call_with_http_info(**kwargs) + return cast( + Dict[str, Any] | ApplyResult[Dict[str, Any]], self.call_with_http_info(**kwargs) + ) self.delete_vectors = _Endpoint( settings={ - "response_type": ({str: (bool, dict, float, int, list, str, none_type)},), + "response_type": (Dict[str, Any],), "auth": ["ApiKeyAuth"], "endpoint_path": "/vectors/delete", "operation_id": "delete_vectors", @@ -139,7 +146,7 @@ def __describe_index_stats( describe_index_stats_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> IndexDescription | ApplyResult[IndexDescription]: """Get index stats # noqa: E501 Return statistics about the contents of an index, including the vector count per namespace, the number of dimensions, and the index fullness. Serverless indexes scale automatically as needed, so index fullness is relevant only for pod-based indexes. # noqa: E501 @@ -179,7 +186,9 @@ def __describe_index_stats( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["describe_index_stats_request"] = describe_index_stats_request - return self.call_with_http_info(**kwargs) + return cast( + IndexDescription | ApplyResult[IndexDescription], self.call_with_http_info(**kwargs) + ) self.describe_index_stats = _Endpoint( settings={ @@ -218,7 +227,7 @@ def __describe_index_stats( def __fetch_vectors( self, ids, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> FetchResponse | ApplyResult[FetchResponse]: """Fetch vectors # noqa: E501 Look up and return vectors by ID from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/manage-data/fetch-data). 
# noqa: E501 @@ -259,7 +268,9 @@ def __fetch_vectors( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["ids"] = ids - return self.call_with_http_info(**kwargs) + return cast( + FetchResponse | ApplyResult[FetchResponse], self.call_with_http_info(**kwargs) + ) self.fetch_vectors = _Endpoint( settings={ @@ -307,7 +318,7 @@ def __fetch_vectors_by_metadata( fetch_by_metadata_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> FetchByMetadataResponse | ApplyResult[FetchByMetadataResponse]: """Fetch vectors by metadata # noqa: E501 Look up and return vectors by metadata filter from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/manage-data/fetch-data). # noqa: E501 @@ -347,7 +358,10 @@ def __fetch_vectors_by_metadata( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["fetch_by_metadata_request"] = fetch_by_metadata_request - return self.call_with_http_info(**kwargs) + return cast( + FetchByMetadataResponse | ApplyResult[FetchByMetadataResponse], + self.call_with_http_info(**kwargs), + ) self.fetch_vectors_by_metadata = _Endpoint( settings={ @@ -386,7 +400,7 @@ def __fetch_vectors_by_metadata( def __list_vectors( self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> ListResponse | ApplyResult[ListResponse]: """List vector IDs # noqa: E501 List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. Returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. 
Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/manage-data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 @@ -428,7 +442,9 @@ def __list_vectors( """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast( + ListResponse | ApplyResult[ListResponse], self.call_with_http_info(**kwargs) + ) self.list_vectors = _Endpoint( settings={ @@ -488,7 +504,7 @@ def __query_vectors( query_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> QueryResponse | ApplyResult[QueryResponse]: """Search with a vector # noqa: E501 Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance, examples, and limits, see [Search](https://docs.pinecone.io/guides/search/search-overview). # noqa: E501 @@ -528,7 +544,9 @@ def __query_vectors( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["query_request"] = query_request - return self.call_with_http_info(**kwargs) + return cast( + QueryResponse | ApplyResult[QueryResponse], self.call_with_http_info(**kwargs) + ) self.query_vectors = _Endpoint( settings={ @@ -568,7 +586,7 @@ def __search_records_namespace( search_records_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> SearchRecordsResponse | ApplyResult[SearchRecordsResponse]: """Search with text # noqa: E501 Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. 
Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for indexes with [integrated embedding](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/search-overview). # noqa: E501 @@ -610,7 +628,10 @@ def __search_records_namespace( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["namespace"] = namespace kwargs["search_records_request"] = search_records_request - return self.call_with_http_info(**kwargs) + return cast( + SearchRecordsResponse | ApplyResult[SearchRecordsResponse], + self.call_with_http_info(**kwargs), + ) self.search_records_namespace = _Endpoint( settings={ @@ -657,7 +678,7 @@ def __update_vector( update_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> UpdateResponse | ApplyResult[UpdateResponse]: """Update a vector # noqa: E501 Update a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/guides/manage-data/update-data). # noqa: E501 @@ -697,7 +718,9 @@ def __update_vector( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["update_request"] = update_request - return self.call_with_http_info(**kwargs) + return cast( + UpdateResponse | ApplyResult[UpdateResponse], self.call_with_http_info(**kwargs) + ) self.update_vector = _Endpoint( settings={ @@ -737,7 +760,7 @@ def __upsert_records_namespace( upsert_record, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> None: """Upsert text # noqa: E501 Upsert text into a namespace. 
Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance, examples, and limits, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data). # noqa: E501 @@ -779,7 +802,7 @@ def __upsert_records_namespace( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["namespace"] = namespace kwargs["upsert_record"] = upsert_record - return self.call_with_http_info(**kwargs) + return cast(None, self.call_with_http_info(**kwargs)) self.upsert_records_namespace = _Endpoint( settings={ @@ -826,7 +849,7 @@ def __upsert_vectors( upsert_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> UpsertResponse | ApplyResult[UpsertResponse]: """Upsert vectors # noqa: E501 Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance, examples, and limits, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data). # noqa: E501 @@ -866,7 +889,9 @@ def __upsert_vectors( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["upsert_request"] = upsert_request - return self.call_with_http_info(**kwargs) + return cast( + UpsertResponse | ApplyResult[UpsertResponse], self.call_with_http_info(**kwargs) + ) self.upsert_vectors = _Endpoint( settings={ @@ -914,7 +939,7 @@ def __init__(self, api_client=None) -> None: async def __delete_vectors( self, delete_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> Dict[str, Any]: """Delete vectors # noqa: E501 Delete vectors by id from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/guides/manage-data/delete-data). 
# noqa: E501 @@ -942,16 +967,16 @@ async def __delete_vectors( Default is True. Returns: - {str: (bool, dict, float, int, list, str, none_type)} + Dict[str, Any] """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["delete_request"] = delete_request - return await self.call_with_http_info(**kwargs) + return cast(Dict[str, Any], await self.call_with_http_info(**kwargs)) self.delete_vectors = _AsyncioEndpoint( settings={ - "response_type": ({str: (bool, dict, float, int, list, str, none_type)},), + "response_type": (Dict[str, Any],), "auth": ["ApiKeyAuth"], "endpoint_path": "/vectors/delete", "operation_id": "delete_vectors", @@ -983,7 +1008,7 @@ async def __delete_vectors( async def __describe_index_stats( self, describe_index_stats_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> IndexDescription: """Get index stats # noqa: E501 Return statistics about the contents of an index, including the vector count per namespace, the number of dimensions, and the index fullness. Serverless indexes scale automatically as needed, so index fullness is relevant only for pod-based indexes. # noqa: E501 @@ -1016,7 +1041,7 @@ async def __describe_index_stats( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["describe_index_stats_request"] = describe_index_stats_request - return await self.call_with_http_info(**kwargs) + return cast(IndexDescription, await self.call_with_http_info(**kwargs)) self.describe_index_stats = _AsyncioEndpoint( settings={ @@ -1053,7 +1078,9 @@ async def __describe_index_stats( callable=__describe_index_stats, ) - async def __fetch_vectors(self, ids, x_pinecone_api_version="2025-10", **kwargs): + async def __fetch_vectors( + self, ids, x_pinecone_api_version="2025-10", **kwargs + ) -> FetchResponse: """Fetch vectors # noqa: E501 Look up and return vectors by ID from a single namespace. 
The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/manage-data/fetch-data). # noqa: E501 @@ -1087,7 +1114,7 @@ async def __fetch_vectors(self, ids, x_pinecone_api_version="2025-10", **kwargs) self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["ids"] = ids - return await self.call_with_http_info(**kwargs) + return cast(FetchResponse, await self.call_with_http_info(**kwargs)) self.fetch_vectors = _AsyncioEndpoint( settings={ @@ -1132,7 +1159,7 @@ async def __fetch_vectors(self, ids, x_pinecone_api_version="2025-10", **kwargs) async def __fetch_vectors_by_metadata( self, fetch_by_metadata_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> FetchByMetadataResponse: """Fetch vectors by metadata # noqa: E501 Look up and return vectors by metadata filter from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/guides/manage-data/fetch-data). # noqa: E501 @@ -1165,7 +1192,7 @@ async def __fetch_vectors_by_metadata( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["fetch_by_metadata_request"] = fetch_by_metadata_request - return await self.call_with_http_info(**kwargs) + return cast(FetchByMetadataResponse, await self.call_with_http_info(**kwargs)) self.fetch_vectors_by_metadata = _AsyncioEndpoint( settings={ @@ -1202,7 +1229,7 @@ async def __fetch_vectors_by_metadata( callable=__fetch_vectors_by_metadata, ) - async def __list_vectors(self, x_pinecone_api_version="2025-10", **kwargs): + async def __list_vectors(self, x_pinecone_api_version="2025-10", **kwargs) -> ListResponse: """List vector IDs # noqa: E501 List the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. 
Returns up to 100 IDs at a time by default in sorted order (bitwise \"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [List record IDs](https://docs.pinecone.io/guides/manage-data/list-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 @@ -1237,7 +1264,7 @@ async def __list_vectors(self, x_pinecone_api_version="2025-10", **kwargs): """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(ListResponse, await self.call_with_http_info(**kwargs)) self.list_vectors = _AsyncioEndpoint( settings={ @@ -1292,7 +1319,9 @@ async def __list_vectors(self, x_pinecone_api_version="2025-10", **kwargs): callable=__list_vectors, ) - async def __query_vectors(self, query_request, x_pinecone_api_version="2025-10", **kwargs): + async def __query_vectors( + self, query_request, x_pinecone_api_version="2025-10", **kwargs + ) -> QueryResponse: """Search with a vector # noqa: E501 Search a namespace using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance, examples, and limits, see [Search](https://docs.pinecone.io/guides/search/search-overview). 
# noqa: E501 @@ -1325,7 +1354,7 @@ async def __query_vectors(self, query_request, x_pinecone_api_version="2025-10", self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["query_request"] = query_request - return await self.call_with_http_info(**kwargs) + return cast(QueryResponse, await self.call_with_http_info(**kwargs)) self.query_vectors = _AsyncioEndpoint( settings={ @@ -1361,7 +1390,7 @@ async def __query_vectors(self, query_request, x_pinecone_api_version="2025-10", async def __search_records_namespace( self, namespace, search_records_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> SearchRecordsResponse: """Search with text # noqa: E501 Search a namespace with a query text, query vector, or record ID and return the most similar records, along with their similarity scores. Optionally, rerank the initial results based on their relevance to the query. Searching with text is supported only for indexes with [integrated embedding](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding). Searching with a query vector or record ID is supported for all indexes. For guidance and examples, see [Search](https://docs.pinecone.io/guides/search/search-overview). 
# noqa: E501 @@ -1396,7 +1425,7 @@ async def __search_records_namespace( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["namespace"] = namespace kwargs["search_records_request"] = search_records_request - return await self.call_with_http_info(**kwargs) + return cast(SearchRecordsResponse, await self.call_with_http_info(**kwargs)) self.search_records_namespace = _AsyncioEndpoint( settings={ @@ -1438,7 +1467,9 @@ async def __search_records_namespace( callable=__search_records_namespace, ) - async def __update_vector(self, update_request, x_pinecone_api_version="2025-10", **kwargs): + async def __update_vector( + self, update_request, x_pinecone_api_version="2025-10", **kwargs + ) -> UpdateResponse: """Update a vector # noqa: E501 Update a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/guides/manage-data/update-data). # noqa: E501 @@ -1471,7 +1502,7 @@ async def __update_vector(self, update_request, x_pinecone_api_version="2025-10" self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["update_request"] = update_request - return await self.call_with_http_info(**kwargs) + return cast(UpdateResponse, await self.call_with_http_info(**kwargs)) self.update_vector = _AsyncioEndpoint( settings={ @@ -1507,7 +1538,7 @@ async def __update_vector(self, update_request, x_pinecone_api_version="2025-10" async def __upsert_records_namespace( self, namespace, upsert_record, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> None: """Upsert text # noqa: E501 Upsert text into a namespace. Pinecone converts the text to vectors automatically using the hosted embedding model associated with the index. 
Upserting text is supported only for [indexes with integrated embedding](https://docs.pinecone.io/reference/api/2025-01/control-plane/create_for_model). For guidance, examples, and limits, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data). # noqa: E501 @@ -1542,7 +1573,7 @@ async def __upsert_records_namespace( kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["namespace"] = namespace kwargs["upsert_record"] = upsert_record - return await self.call_with_http_info(**kwargs) + return cast(None, await self.call_with_http_info(**kwargs)) self.upsert_records_namespace = _AsyncioEndpoint( settings={ @@ -1586,7 +1617,7 @@ async def __upsert_records_namespace( async def __upsert_vectors( self, upsert_request, x_pinecone_api_version="2025-10", **kwargs - ): + ) -> UpsertResponse: """Upsert vectors # noqa: E501 Upsert vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance, examples, and limits, see [Upsert data](https://docs.pinecone.io/guides/index-data/upsert-data). 
# noqa: E501 @@ -1619,7 +1650,7 @@ async def __upsert_vectors( self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["upsert_request"] = upsert_request - return await self.call_with_http_info(**kwargs) + return cast(UpsertResponse, await self.call_with_http_info(**kwargs)) self.upsert_vectors = _AsyncioEndpoint( settings={ diff --git a/pinecone/core/openapi/db_data/model/create_namespace_request.py b/pinecone/core/openapi/db_data/model/create_namespace_request.py index 6ea00aee8..56809af25 100644 --- a/pinecone/core/openapi/db_data/model/create_namespace_request.py +++ b/pinecone/core/openapi/db_data/model/create_namespace_request.py @@ -26,6 +26,13 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.create_namespace_request_schema import ( + CreateNamespaceRequestSchema, + ) + def lazy_import(): from pinecone.core.openapi.db_data.model.create_namespace_request_schema import ( @@ -111,6 +118,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateNamespaceRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], name, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/create_namespace_request_schema.py b/pinecone/core/openapi/db_data/model/create_namespace_request_schema.py index 547e23c58..e8dbfb59b 100644 --- a/pinecone/core/openapi/db_data/model/create_namespace_request_schema.py +++ b/pinecone/core/openapi/db_data/model/create_namespace_request_schema.py @@ -26,6 +26,13 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.create_namespace_request_schema_fields import ( + CreateNamespaceRequestSchemaFields, + ) + def lazy_import(): from pinecone.core.openapi.db_data.model.create_namespace_request_schema_fields import ( @@ -109,6 +116,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateNamespaceRequestSchema. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], fields, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/create_namespace_request_schema_fields.py b/pinecone/core/openapi/db_data/model/create_namespace_request_schema_fields.py index 149eb3aad..421885f12 100644 --- a/pinecone/core/openapi/db_data/model/create_namespace_request_schema_fields.py +++ b/pinecone/core/openapi/db_data/model/create_namespace_request_schema_fields.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of CreateNamespaceRequestSchemaFields. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/delete_request.py b/pinecone/core/openapi/db_data/model/delete_request.py index aea6d5974..2412ff7cd 100644 --- a/pinecone/core/openapi/db_data/model/delete_request.py +++ b/pinecone/core/openapi/db_data/model/delete_request.py @@ -87,7 +87,7 @@ def openapi_types(cls): "ids": ([str],), # noqa: E501 "delete_all": (bool,), # noqa: E501 "namespace": (str,), # noqa: E501 - "filter": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "filter": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -105,6 +105,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of DeleteRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 @@ -144,7 +155,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 ids ([str]): Vectors to delete. [optional] # noqa: E501 delete_all (bool): This indicates that all vectors in the index namespace should be deleted. [optional] if omitted the server will use the default value of False. # noqa: E501 namespace (str): The namespace to delete vectors from, if applicable. 
[optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Delete data](https://docs.pinecone.io/guides/manage-data/delete-data#delete-records-by-metadata). [optional] # noqa: E501 + filter (Dict[str, Any]): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Delete data](https://docs.pinecone.io/guides/manage-data/delete-data#delete-records-by-metadata). [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -237,7 +248,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ids ([str]): Vectors to delete. [optional] # noqa: E501 delete_all (bool): This indicates that all vectors in the index namespace should be deleted. [optional] if omitted the server will use the default value of False. # noqa: E501 namespace (str): The namespace to delete vectors from, if applicable. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Delete data](https://docs.pinecone.io/guides/manage-data/delete-data#delete-records-by-metadata). [optional] # noqa: E501 + filter (Dict[str, Any]): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See [Delete data](https://docs.pinecone.io/guides/manage-data/delete-data#delete-records-by-metadata). 
[optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py index 1e4638018..58e88b5c9 100644 --- a/pinecone/core/openapi/db_data/model/describe_index_stats_request.py +++ b/pinecone/core/openapi/db_data/model/describe_index_stats_request.py @@ -84,7 +84,7 @@ def openapi_types(cls): and the value is attribute type. """ return { - "filter": ({str: (bool, dict, float, int, list, str, none_type)},) # noqa: E501 + "filter": (Dict[str, Any],) # noqa: E501 } @cached_class_property @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of DescribeIndexStatsRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 @@ -135,7 +146,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. 
[optional] # noqa: E501 + filter (Dict[str, Any]): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -225,7 +236,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. [optional] # noqa: E501 + filter (Dict[str, Any]): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). Serverless indexes do not support filtering `describe_index_stats` by metadata. 
[optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/db_data/model/fetch_by_metadata_request.py b/pinecone/core/openapi/db_data/model/fetch_by_metadata_request.py index 3d6d70676..1f3a2ddef 100644 --- a/pinecone/core/openapi/db_data/model/fetch_by_metadata_request.py +++ b/pinecone/core/openapi/db_data/model/fetch_by_metadata_request.py @@ -87,7 +87,7 @@ def openapi_types(cls): """ return { "namespace": (str,), # noqa: E501 - "filter": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "filter": (Dict[str, Any],), # noqa: E501 "limit": (int,), # noqa: E501 "pagination_token": (str,), # noqa: E501 } @@ -107,6 +107,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of FetchByMetadataRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 @@ -144,7 +155,7 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to fetch vectors from. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): Metadata filter expression to select vectors. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 + filter (Dict[str, Any]): Metadata filter expression to select vectors. 
See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 limit (int): Max number of vectors to return. [optional] if omitted the server will use the default value of 100. # noqa: E501 pagination_token (str): Pagination token to continue a previous listing operation. [optional] # noqa: E501 """ @@ -237,7 +248,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to fetch vectors from. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): Metadata filter expression to select vectors. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 + filter (Dict[str, Any]): Metadata filter expression to select vectors. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 limit (int): Max number of vectors to return. [optional] if omitted the server will use the default value of 100. # noqa: E501 pagination_token (str): Pagination token to continue a previous listing operation. 
[optional] # noqa: E501 """ diff --git a/pinecone/core/openapi/db_data/model/fetch_by_metadata_response.py b/pinecone/core/openapi/db_data/model/fetch_by_metadata_response.py index e4811b3be..d7c2fbfb8 100644 --- a/pinecone/core/openapi/db_data/model/fetch_by_metadata_response.py +++ b/pinecone/core/openapi/db_data/model/fetch_by_metadata_response.py @@ -26,6 +26,13 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.pagination import Pagination + from pinecone.core.openapi.db_data.model.usage import Usage + from pinecone.core.openapi.db_data.model.vector import Vector + def lazy_import(): from pinecone.core.openapi.db_data.model.pagination import Pagination @@ -117,6 +124,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of FetchByMetadataResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/fetch_response.py b/pinecone/core/openapi/db_data/model/fetch_response.py index 8d39fa6bb..72a4783a3 100644 --- a/pinecone/core/openapi/db_data/model/fetch_response.py +++ b/pinecone/core/openapi/db_data/model/fetch_response.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.usage import Usage + from pinecone.core.openapi.db_data.model.vector import Vector + def lazy_import(): from pinecone.core.openapi.db_data.model.usage import Usage @@ -113,6 +119,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of FetchResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/hit.py b/pinecone/core/openapi/db_data/model/hit.py index 0f5970d94..397d28f86 100644 --- a/pinecone/core/openapi/db_data/model/hit.py +++ b/pinecone/core/openapi/db_data/model/hit.py @@ -86,7 +86,7 @@ def openapi_types(cls): return { "_id": (str,), # noqa: E501 "_score": (float,), # noqa: E501 - "fields": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "fields": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -103,6 +103,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of Hit. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: # noqa: E501 @@ -111,7 +122,7 @@ def _from_openapi_data(cls: Type[T], _id, _score, fields, *args, **kwargs) -> T: Args: _id (str): The record id of the search hit. _score (float): The similarity score of the returned record. - fields ({str: (bool, dict, float, int, list, str, none_type)}): The selected record fields associated with the search hit. + fields (Dict[str, Any]): The selected record fields associated with the search hit. 
Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -208,7 +219,7 @@ def __init__(self, _id, _score, fields, *args, **kwargs) -> None: # noqa: E501 Args: _id (str): The record id of the search hit. _score (float): The similarity score of the returned record. - fields ({str: (bool, dict, float, int, list, str, none_type)}): The selected record fields associated with the search hit. + fields (Dict[str, Any]): The selected record fields associated with the search hit. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types diff --git a/pinecone/core/openapi/db_data/model/import_error_mode.py b/pinecone/core/openapi/db_data/model/import_error_mode.py index 955603b3b..a06e01640 100644 --- a/pinecone/core/openapi/db_data/model/import_error_mode.py +++ b/pinecone/core/openapi/db_data/model/import_error_mode.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ImportErrorMode. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/import_model.py b/pinecone/core/openapi/db_data/model/import_model.py index fe666b89a..98333a825 100644 --- a/pinecone/core/openapi/db_data/model/import_model.py +++ b/pinecone/core/openapi/db_data/model/import_model.py @@ -116,6 +116,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ImportModel. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/index_description.py b/pinecone/core/openapi/db_data/model/index_description.py index d49b93cdb..0dbc89bc0 100644 --- a/pinecone/core/openapi/db_data/model/index_description.py +++ b/pinecone/core/openapi/db_data/model/index_description.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.namespace_summary import NamespaceSummary + def lazy_import(): from pinecone.core.openapi.db_data.model.namespace_summary import NamespaceSummary @@ -121,6 +126,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: 
Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of IndexDescription. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/list_imports_response.py b/pinecone/core/openapi/db_data/model/list_imports_response.py index 378a35ba0..b3cc47177 100644 --- a/pinecone/core/openapi/db_data/model/list_imports_response.py +++ b/pinecone/core/openapi/db_data/model/list_imports_response.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.import_model import ImportModel + from pinecone.core.openapi.db_data.model.pagination import Pagination + def lazy_import(): from pinecone.core.openapi.db_data.model.import_model import ImportModel @@ -111,6 +117,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ListImportsResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/list_item.py b/pinecone/core/openapi/db_data/model/list_item.py index 420eb710b..6fd00f857 100644 --- a/pinecone/core/openapi/db_data/model/list_item.py +++ b/pinecone/core/openapi/db_data/model/list_item.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ListItem. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/list_namespaces_response.py b/pinecone/core/openapi/db_data/model/list_namespaces_response.py index 7320854aa..54037cf65 100644 --- a/pinecone/core/openapi/db_data/model/list_namespaces_response.py +++ b/pinecone/core/openapi/db_data/model/list_namespaces_response.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.namespace_description import NamespaceDescription + from pinecone.core.openapi.db_data.model.pagination import Pagination + def lazy_import(): from pinecone.core.openapi.db_data.model.namespace_description import NamespaceDescription @@ -113,6 +119,17 @@ def 
discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ListNamespacesResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/list_response.py b/pinecone/core/openapi/db_data/model/list_response.py index 50aef17f5..c599e9a0e 100644 --- a/pinecone/core/openapi/db_data/model/list_response.py +++ b/pinecone/core/openapi/db_data/model/list_response.py @@ -26,6 +26,13 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.list_item import ListItem + from pinecone.core.openapi.db_data.model.pagination import Pagination + from pinecone.core.openapi.db_data.model.usage import Usage + def lazy_import(): from pinecone.core.openapi.db_data.model.list_item import ListItem @@ -117,6 +124,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ListResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/namespace_description.py b/pinecone/core/openapi/db_data/model/namespace_description.py index 0127e3652..f419983b0 100644 --- a/pinecone/core/openapi/db_data/model/namespace_description.py +++ b/pinecone/core/openapi/db_data/model/namespace_description.py @@ -26,6 +26,16 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.create_namespace_request_schema import ( + CreateNamespaceRequestSchema, + ) + from pinecone.core.openapi.db_data.model.namespace_description_indexed_fields import ( + NamespaceDescriptionIndexedFields, + ) + def lazy_import(): from pinecone.core.openapi.db_data.model.create_namespace_request_schema import ( @@ -119,6 +129,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of NamespaceDescription. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/namespace_description_indexed_fields.py b/pinecone/core/openapi/db_data/model/namespace_description_indexed_fields.py index edd8ace1d..1272d78f9 100644 --- a/pinecone/core/openapi/db_data/model/namespace_description_indexed_fields.py +++ b/pinecone/core/openapi/db_data/model/namespace_description_indexed_fields.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of NamespaceDescriptionIndexedFields. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/namespace_summary.py b/pinecone/core/openapi/db_data/model/namespace_summary.py index b6ef77ab2..a7f1ad9df 100644 --- a/pinecone/core/openapi/db_data/model/namespace_summary.py +++ b/pinecone/core/openapi/db_data/model/namespace_summary.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of NamespaceSummary. + + This method is overridden to provide proper type inference for mypy. 
+ The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/pagination.py b/pinecone/core/openapi/db_data/model/pagination.py index 374562312..70492aca3 100644 --- a/pinecone/core/openapi/db_data/model/pagination.py +++ b/pinecone/core/openapi/db_data/model/pagination.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of Pagination. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/protobuf_any.py b/pinecone/core/openapi/db_data/model/protobuf_any.py index 1dc76bc39..20e694821 100644 --- a/pinecone/core/openapi/db_data/model/protobuf_any.py +++ b/pinecone/core/openapi/db_data/model/protobuf_any.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ProtobufAny. + + This method is overridden to provide proper type inference for mypy. 
+ The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/query_request.py b/pinecone/core/openapi/db_data/model/query_request.py index 88d12c238..9d4cef11b 100644 --- a/pinecone/core/openapi/db_data/model/query_request.py +++ b/pinecone/core/openapi/db_data/model/query_request.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.query_vector import QueryVector + from pinecone.core.openapi.db_data.model.sparse_values import SparseValues + def lazy_import(): from pinecone.core.openapi.db_data.model.query_vector import QueryVector @@ -101,7 +107,7 @@ def openapi_types(cls): return { "top_k": (int,), # noqa: E501 "namespace": (str,), # noqa: E501 - "filter": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "filter": (Dict[str, Any],), # noqa: E501 "include_values": (bool,), # noqa: E501 "include_metadata": (bool,), # noqa: E501 "queries": ([QueryVector],), # noqa: E501 @@ -130,6 +136,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of QueryRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 @@ -170,7 +187,7 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to query. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 + filter (Dict[str, Any]): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 include_values (bool): Indicates whether vector values are included in the response. [optional] if omitted the server will use the default value of False. # noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. [optional] if omitted the server will use the default value of False. # noqa: E501 queries ([QueryVector]): DEPRECATED. Use `vector` or `id` instead. [optional] # noqa: E501 @@ -271,7 +288,7 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) namespace (str): The namespace to query. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). 
[optional] # noqa: E501 + filter (Dict[str, Any]): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 include_values (bool): Indicates whether vector values are included in the response. [optional] if omitted the server will use the default value of False. # noqa: E501 include_metadata (bool): Indicates whether metadata is included in the response as well as the ids. [optional] if omitted the server will use the default value of False. # noqa: E501 queries ([QueryVector]): DEPRECATED. Use `vector` or `id` instead. [optional] # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/query_response.py b/pinecone/core/openapi/db_data/model/query_response.py index a28efa5fc..e9f19c72d 100644 --- a/pinecone/core/openapi/db_data/model/query_response.py +++ b/pinecone/core/openapi/db_data/model/query_response.py @@ -26,6 +26,13 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.scored_vector import ScoredVector + from pinecone.core.openapi.db_data.model.single_query_results import SingleQueryResults + from pinecone.core.openapi.db_data.model.usage import Usage + def lazy_import(): from pinecone.core.openapi.db_data.model.scored_vector import ScoredVector @@ -117,6 +124,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of QueryResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/query_vector.py b/pinecone/core/openapi/db_data/model/query_vector.py index 4e8f3be20..d40d59736 100644 --- a/pinecone/core/openapi/db_data/model/query_vector.py +++ b/pinecone/core/openapi/db_data/model/query_vector.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.sparse_values import SparseValues + def lazy_import(): from pinecone.core.openapi.db_data.model.sparse_values import SparseValues @@ -99,7 +104,7 @@ def openapi_types(cls): "sparse_values": (SparseValues,), # noqa: E501 "top_k": (int,), # noqa: E501 "namespace": (str,), # noqa: E501 - "filter": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "filter": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -118,6 +123,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of QueryVector. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E501 @@ -160,7 +176,7 @@ def _from_openapi_data(cls: Type[T], values, *args, **kwargs) -> T: # noqa: E50 sparse_values (SparseValues): [optional] # noqa: E501 top_k (int): An override for the number of results to return for this query vector. [optional] # noqa: E501 namespace (str): An override the namespace to search. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter. [optional] # noqa: E501 + filter (Dict[str, Any]): An override for the metadata filter to apply. This replaces the request-level filter. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -257,7 +273,7 @@ def __init__(self, values, *args, **kwargs) -> None: # noqa: E501 sparse_values (SparseValues): [optional] # noqa: E501 top_k (int): An override for the number of results to return for this query vector. [optional] # noqa: E501 namespace (str): An override the namespace to search. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter. [optional] # noqa: E501 + filter (Dict[str, Any]): An override for the metadata filter to apply. This replaces the request-level filter. 
[optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/db_data/model/rpc_status.py b/pinecone/core/openapi/db_data/model/rpc_status.py index 8feaf20d5..ef015f5d9 100644 --- a/pinecone/core/openapi/db_data/model/rpc_status.py +++ b/pinecone/core/openapi/db_data/model/rpc_status.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.protobuf_any import ProtobufAny + def lazy_import(): from pinecone.core.openapi.db_data.model.protobuf_any import ProtobufAny @@ -111,6 +116,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of RpcStatus. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/scored_vector.py b/pinecone/core/openapi/db_data/model/scored_vector.py index a18f7d7e8..2c664318c 100644 --- a/pinecone/core/openapi/db_data/model/scored_vector.py +++ b/pinecone/core/openapi/db_data/model/scored_vector.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.sparse_values import SparseValues + def lazy_import(): from pinecone.core.openapi.db_data.model.sparse_values import SparseValues @@ -98,7 +103,7 @@ def openapi_types(cls): "score": (float,), # noqa: E501 "values": ([float],), # noqa: E501 "sparse_values": (SparseValues,), # noqa: E501 - "metadata": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "metadata": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -117,6 +122,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ScoredVector. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 @@ -159,7 +175,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 score (float): This is a measure of similarity between this vector and the query vector. The higher the score, the more they are similar. [optional] # noqa: E501 values ([float]): This is the vector data, if it is requested. [optional] # noqa: E501 sparse_values (SparseValues): [optional] # noqa: E501 - metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested. [optional] # noqa: E501 + metadata (Dict[str, Any]): This is the metadata, if it is requested. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -256,7 +272,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 score (float): This is a measure of similarity between this vector and the query vector. The higher the score, the more they are similar. [optional] # noqa: E501 values ([float]): This is the vector data, if it is requested. [optional] # noqa: E501 sparse_values (SparseValues): [optional] # noqa: E501 - metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested. [optional] # noqa: E501 + metadata (Dict[str, Any]): This is the metadata, if it is requested. 
[optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/db_data/model/search_match_terms.py b/pinecone/core/openapi/db_data/model/search_match_terms.py index c5d59569f..605b2093a 100644 --- a/pinecone/core/openapi/db_data/model/search_match_terms.py +++ b/pinecone/core/openapi/db_data/model/search_match_terms.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of SearchMatchTerms. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/search_records_request.py b/pinecone/core/openapi/db_data/model/search_records_request.py index 1030ef90d..9505a80dc 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request.py +++ b/pinecone/core/openapi/db_data/model/search_records_request.py @@ -26,6 +26,16 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.search_records_request_query import ( + SearchRecordsRequestQuery, + ) + from pinecone.core.openapi.db_data.model.search_records_request_rerank import ( + SearchRecordsRequestRerank, + ) + def lazy_import(): from pinecone.core.openapi.db_data.model.search_records_request_query import ( @@ -117,6 +127,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def 
__new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of SearchRecordsRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], query, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/search_records_request_query.py b/pinecone/core/openapi/db_data/model/search_records_request_query.py index 68d3a3da6..b77aedf85 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_query.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_query.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.search_match_terms import SearchMatchTerms + from pinecone.core.openapi.db_data.model.search_records_vector import SearchRecordsVector + def lazy_import(): from pinecone.core.openapi.db_data.model.search_match_terms import SearchMatchTerms @@ -95,8 +101,8 @@ def openapi_types(cls): lazy_import() return { "top_k": (int,), # noqa: E501 - "filter": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 - "inputs": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "filter": (Dict[str, Any],), # noqa: E501 + "inputs": (Dict[str, Any],), # noqa: E501 "vector": (SearchRecordsVector,), # noqa: E501 "id": (str,), # noqa: E501 "match_terms": (SearchMatchTerms,), # noqa: E501 @@ -119,6 +125,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: 
+ """Create a new instance of SearchRecordsRequestQuery. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 @@ -158,8 +175,8 @@ def _from_openapi_data(cls: Type[T], top_k, *args, **kwargs) -> T: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 - inputs ({str: (bool, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 + filter (Dict[str, Any]): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 + inputs (Dict[str, Any]): [optional] # noqa: E501 vector (SearchRecordsVector): [optional] # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. [optional] # noqa: E501 match_terms (SearchMatchTerms): [optional] # noqa: E501 @@ -256,8 +273,8 @@ def __init__(self, top_k, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - filter ({str: (bool, dict, float, int, list, str, none_type)}): The filter to apply. You can use vector metadata to limit your search. 
See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 - inputs ({str: (bool, dict, float, int, list, str, none_type)}): [optional] # noqa: E501 + filter (Dict[str, Any]): The filter to apply. You can use vector metadata to limit your search. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 + inputs (Dict[str, Any]): [optional] # noqa: E501 vector (SearchRecordsVector): [optional] # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. [optional] # noqa: E501 match_terms (SearchMatchTerms): [optional] # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py index c52907d05..81cd31a1d 100644 --- a/pinecone/core/openapi/db_data/model/search_records_request_rerank.py +++ b/pinecone/core/openapi/db_data/model/search_records_request_rerank.py @@ -87,7 +87,7 @@ def openapi_types(cls): "model": (str,), # noqa: E501 "rank_fields": ([str],), # noqa: E501 "top_n": (int,), # noqa: E501 - "parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "parameters": (Dict[str, Any],), # noqa: E501 "query": (str,), # noqa: E501 } @@ -107,6 +107,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of SearchRecordsRequestRerank. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: # noqa: E501 @@ -148,7 +159,7 @@ def _from_openapi_data(cls: Type[T], model, rank_fields, *args, **kwargs) -> T: through its discriminator because we passed in _visited_composed_classes = (Animal,) top_n (int): The number of top results to return after reranking. Defaults to top_k. [optional] # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 + parameters (Dict[str, Any]): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 query (str): The query to rerank documents against. If a specific rerank query is specified, it overwrites the query input that was provided at the top level. [optional] # noqa: E501 """ @@ -246,7 +257,7 @@ def __init__(self, model, rank_fields, *args, **kwargs) -> None: # noqa: E501 through its discriminator because we passed in _visited_composed_classes = (Animal,) top_n (int): The number of top results to return after reranking. Defaults to top_k. [optional] # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 + parameters (Dict[str, Any]): Additional model-specific parameters. 
Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 query (str): The query to rerank documents against. If a specific rerank query is specified, it overwrites the query input that was provided at the top level. [optional] # noqa: E501 """ diff --git a/pinecone/core/openapi/db_data/model/search_records_response.py b/pinecone/core/openapi/db_data/model/search_records_response.py index 0fead75f3..c5ea7524a 100644 --- a/pinecone/core/openapi/db_data/model/search_records_response.py +++ b/pinecone/core/openapi/db_data/model/search_records_response.py @@ -26,6 +26,14 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.search_records_response_result import ( + SearchRecordsResponseResult, + ) + from pinecone.core.openapi.db_data.model.search_usage import SearchUsage + def lazy_import(): from pinecone.core.openapi.db_data.model.search_records_response_result import ( @@ -113,6 +121,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of SearchRecordsResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], result, usage, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/search_records_response_result.py b/pinecone/core/openapi/db_data/model/search_records_response_result.py index 477da0a16..0407075ef 100644 --- a/pinecone/core/openapi/db_data/model/search_records_response_result.py +++ b/pinecone/core/openapi/db_data/model/search_records_response_result.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.hit import Hit + def lazy_import(): from pinecone.core.openapi.db_data.model.hit import Hit @@ -107,6 +112,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of SearchRecordsResponseResult. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], hits, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/search_records_vector.py b/pinecone/core/openapi/db_data/model/search_records_vector.py index 15868d042..09729daef 100644 --- a/pinecone/core/openapi/db_data/model/search_records_vector.py +++ b/pinecone/core/openapi/db_data/model/search_records_vector.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.vector_values import VectorValues + def lazy_import(): from pinecone.core.openapi.db_data.model.vector_values import VectorValues @@ -111,6 +116,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of SearchRecordsVector. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/search_usage.py b/pinecone/core/openapi/db_data/model/search_usage.py index cc7e1f795..2de18f899 100644 --- a/pinecone/core/openapi/db_data/model/search_usage.py +++ b/pinecone/core/openapi/db_data/model/search_usage.py @@ -107,6 +107,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of SearchUsage. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], read_units, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/single_query_results.py b/pinecone/core/openapi/db_data/model/single_query_results.py index 94e041d27..1dbf183b1 100644 --- a/pinecone/core/openapi/db_data/model/single_query_results.py +++ b/pinecone/core/openapi/db_data/model/single_query_results.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.scored_vector import ScoredVector + def lazy_import(): from pinecone.core.openapi.db_data.model.scored_vector import ScoredVector @@ -109,6 +114,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def 
__new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of SingleQueryResults. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/sparse_values.py b/pinecone/core/openapi/db_data/model/sparse_values.py index 7670fc295..541e3e18a 100644 --- a/pinecone/core/openapi/db_data/model/sparse_values.py +++ b/pinecone/core/openapi/db_data/model/sparse_values.py @@ -104,6 +104,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of SparseValues. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], indices, values, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/start_import_request.py b/pinecone/core/openapi/db_data/model/start_import_request.py index 351f05cd8..28ab505d8 100644 --- a/pinecone/core/openapi/db_data/model/start_import_request.py +++ b/pinecone/core/openapi/db_data/model/start_import_request.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.import_error_mode import ImportErrorMode + def lazy_import(): from pinecone.core.openapi.db_data.model.import_error_mode import ImportErrorMode @@ -114,6 +119,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of StartImportRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], uri, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/start_import_response.py b/pinecone/core/openapi/db_data/model/start_import_response.py index a34ccf9ac..3e3115b07 100644 --- a/pinecone/core/openapi/db_data/model/start_import_response.py +++ b/pinecone/core/openapi/db_data/model/start_import_response.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of StartImportResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/update_request.py b/pinecone/core/openapi/db_data/model/update_request.py index 92786fceb..79ff7c599 100644 --- a/pinecone/core/openapi/db_data/model/update_request.py +++ b/pinecone/core/openapi/db_data/model/update_request.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.sparse_values import SparseValues + def lazy_import(): from pinecone.core.openapi.db_data.model.sparse_values import SparseValues @@ -98,9 +103,9 @@ def openapi_types(cls): "id": (str,), # noqa: E501 "values": ([float],), # noqa: E501 "sparse_values": (SparseValues,), # noqa: E501 - "set_metadata": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "set_metadata": (Dict[str, Any],), # noqa: E501 "namespace": (str,), # noqa: E501 - "filter": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "filter": (Dict[str, Any],), # noqa: E501 "dry_run": (bool,), # noqa: E501 } @@ -122,6 +127,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of UpdateRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 @@ -161,9 +177,9 @@ def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 id (str): Vector's unique id. [optional] # noqa: E501 values ([float]): Vector data. [optional] # noqa: E501 sparse_values (SparseValues): [optional] # noqa: E501 - set_metadata ({str: (bool, dict, float, int, list, str, none_type)}): Metadata to set for the vector. [optional] # noqa: E501 + set_metadata (Dict[str, Any]): Metadata to set for the vector. [optional] # noqa: E501 namespace (str): The namespace containing the vector to update. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): A metadata filter expression. When updating metadata across records in a namespace, the update is applied to all records that match the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 + filter (Dict[str, Any]): A metadata filter expression. When updating metadata across records in a namespace, the update is applied to all records that match the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 dry_run (bool): If `true`, return the number of records that match the `filter`, but do not execute the update. Default is `false`. [optional] if omitted the server will use the default value of False. # noqa: E501 """ @@ -257,9 +273,9 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 id (str): Vector's unique id. [optional] # noqa: E501 values ([float]): Vector data. 
[optional] # noqa: E501 sparse_values (SparseValues): [optional] # noqa: E501 - set_metadata ({str: (bool, dict, float, int, list, str, none_type)}): Metadata to set for the vector. [optional] # noqa: E501 + set_metadata (Dict[str, Any]): Metadata to set for the vector. [optional] # noqa: E501 namespace (str): The namespace containing the vector to update. [optional] # noqa: E501 - filter ({str: (bool, dict, float, int, list, str, none_type)}): A metadata filter expression. When updating metadata across records in a namespace, the update is applied to all records that match the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 + filter (Dict[str, Any]): A metadata filter expression. When updating metadata across records in a namespace, the update is applied to all records that match the filter. See [Understanding metadata](https://docs.pinecone.io/guides/index-data/indexing-overview#metadata). [optional] # noqa: E501 dry_run (bool): If `true`, return the number of records that match the `filter`, but do not execute the update. Default is `false`. [optional] if omitted the server will use the default value of False. # noqa: E501 """ diff --git a/pinecone/core/openapi/db_data/model/update_response.py b/pinecone/core/openapi/db_data/model/update_response.py index 8b4a63c1f..61c8d6674 100644 --- a/pinecone/core/openapi/db_data/model/update_response.py +++ b/pinecone/core/openapi/db_data/model/update_response.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of UpdateResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/upsert_record.py b/pinecone/core/openapi/db_data/model/upsert_record.py index 42e97e114..62e9322d3 100644 --- a/pinecone/core/openapi/db_data/model/upsert_record.py +++ b/pinecone/core/openapi/db_data/model/upsert_record.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of UpsertRecord. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], _id, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/upsert_request.py b/pinecone/core/openapi/db_data/model/upsert_request.py index 2d0167316..94739ac6f 100644 --- a/pinecone/core/openapi/db_data/model/upsert_request.py +++ b/pinecone/core/openapi/db_data/model/upsert_request.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.vector import Vector + def lazy_import(): from pinecone.core.openapi.db_data.model.vector import Vector @@ -109,6 +114,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + 
"""Create a new instance of UpsertRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], vectors, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/upsert_response.py b/pinecone/core/openapi/db_data/model/upsert_response.py index 7a53c74fb..0e2c7c4ac 100644 --- a/pinecone/core/openapi/db_data/model/upsert_response.py +++ b/pinecone/core/openapi/db_data/model/upsert_response.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of UpsertResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/usage.py b/pinecone/core/openapi/db_data/model/usage.py index 5710338ef..a8c04bc45 100644 --- a/pinecone/core/openapi/db_data/model/usage.py +++ b/pinecone/core/openapi/db_data/model/usage.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of Usage. 
+ + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/db_data/model/vector.py b/pinecone/core/openapi/db_data/model/vector.py index d5ae043ed..453552b09 100644 --- a/pinecone/core/openapi/db_data/model/vector.py +++ b/pinecone/core/openapi/db_data/model/vector.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.db_data.model.sparse_values import SparseValues + def lazy_import(): from pinecone.core.openapi.db_data.model.sparse_values import SparseValues @@ -98,7 +103,7 @@ def openapi_types(cls): "id": (str,), # noqa: E501 "values": ([float],), # noqa: E501 "sparse_values": (SparseValues,), # noqa: E501 - "metadata": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "metadata": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -116,6 +121,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of Vector. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 @@ -157,7 +173,7 @@ def _from_openapi_data(cls: Type[T], id, *args, **kwargs) -> T: # noqa: E501 _visited_composed_classes = (Animal,) values ([float]): This is the vector data included in the request. [optional] # noqa: E501 sparse_values (SparseValues): [optional] # noqa: E501 - metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata included in the request. [optional] # noqa: E501 + metadata (Dict[str, Any]): This is the metadata included in the request. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -253,7 +269,7 @@ def __init__(self, id, *args, **kwargs) -> None: # noqa: E501 _visited_composed_classes = (Animal,) values ([float]): This is the vector data included in the request. [optional] # noqa: E501 sparse_values (SparseValues): [optional] # noqa: E501 - metadata ({str: (bool, dict, float, int, list, str, none_type)}): This is the metadata included in the request. [optional] # noqa: E501 + metadata (Dict[str, Any]): This is the metadata included in the request. 
[optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/inference/api/inference_api.py b/pinecone/core/openapi/inference/api/inference_api.py index 63b7a43ca..1e38938f8 100644 --- a/pinecone/core/openapi/inference/api/inference_api.py +++ b/pinecone/core/openapi/inference/api/inference_api.py @@ -9,6 +9,11 @@ Contact: support@pinecone.io """ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, cast +from multiprocessing.pool import ApplyResult + from pinecone.openapi_support import ApiClient, AsyncioApiClient from pinecone.openapi_support.endpoint_utils import ( ExtraOpenApiKwargsTypedDict, @@ -43,7 +48,9 @@ def __init__(self, api_client=None) -> None: api_client = ApiClient() self.api_client = api_client - def __embed(self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict): + def __embed( + self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict + ) -> EmbeddingsList | ApplyResult[EmbeddingsList]: """Generate vectors # noqa: E501 Generate vector embeddings for input data. This endpoint uses Pinecone's [hosted embedding models](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models). # noqa: E501 @@ -82,7 +89,9 @@ def __embed(self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargs """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast( + EmbeddingsList | ApplyResult[EmbeddingsList], self.call_with_http_info(**kwargs) + ) self.embed = _Endpoint( settings={ @@ -121,7 +130,7 @@ def __get_model( model_name, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> ModelInfo | ApplyResult[ModelInfo]: """Describe a model # noqa: E501 Get a description of a model hosted by Pinecone. 
You can use hosted models as an integrated part of Pinecone operations or for standalone embedding and reranking. For more details, see [Vector embedding](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding) and [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 @@ -161,7 +170,7 @@ def __get_model( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["model_name"] = model_name - return self.call_with_http_info(**kwargs) + return cast(ModelInfo | ApplyResult[ModelInfo], self.call_with_http_info(**kwargs)) self.get_model = _Endpoint( settings={ @@ -197,7 +206,7 @@ def __get_model( def __list_models( self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict - ): + ) -> ModelInfoList | ApplyResult[ModelInfoList]: """List available models # noqa: E501 List the embedding and reranking models hosted by Pinecone. You can use hosted models as an integrated part of Pinecone operations or for standalone embedding and reranking. For more details, see [Vector embedding](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding) and [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). 
# noqa: E501 @@ -237,7 +246,9 @@ def __list_models( """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast( + ModelInfoList | ApplyResult[ModelInfoList], self.call_with_http_info(**kwargs) + ) self.list_models = _Endpoint( settings={ @@ -280,7 +291,9 @@ def __list_models( callable=__list_models, ) - def __rerank(self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict): + def __rerank( + self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict + ) -> RerankResult | ApplyResult[RerankResult]: """Rerank results # noqa: E501 Rerank results according to their relevance to a query. For guidance and examples, see [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 @@ -319,7 +332,9 @@ def __rerank(self, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwarg """ kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return self.call_with_http_info(**kwargs) + return cast( + RerankResult | ApplyResult[RerankResult], self.call_with_http_info(**kwargs) + ) self.rerank = _Endpoint( settings={ @@ -365,7 +380,7 @@ def __init__(self, api_client=None) -> None: api_client = AsyncioApiClient() self.api_client = api_client - async def __embed(self, x_pinecone_api_version="2025-10", **kwargs): + async def __embed(self, x_pinecone_api_version="2025-10", **kwargs) -> EmbeddingsList: """Generate vectors # noqa: E501 Generate vector embeddings for input data. This endpoint uses Pinecone's [hosted embedding models](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models). 
# noqa: E501 @@ -397,7 +412,7 @@ async def __embed(self, x_pinecone_api_version="2025-10", **kwargs): """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(EmbeddingsList, await self.call_with_http_info(**kwargs)) self.embed = _AsyncioEndpoint( settings={ @@ -431,7 +446,9 @@ async def __embed(self, x_pinecone_api_version="2025-10", **kwargs): callable=__embed, ) - async def __get_model(self, model_name, x_pinecone_api_version="2025-10", **kwargs): + async def __get_model( + self, model_name, x_pinecone_api_version="2025-10", **kwargs + ) -> ModelInfo: """Describe a model # noqa: E501 Get a description of a model hosted by Pinecone. You can use hosted models as an integrated part of Pinecone operations or for standalone embedding and reranking. For more details, see [Vector embedding](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding) and [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 @@ -464,7 +481,7 @@ async def __get_model(self, model_name, x_pinecone_api_version="2025-10", **kwar self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["model_name"] = model_name - return await self.call_with_http_info(**kwargs) + return cast(ModelInfo, await self.call_with_http_info(**kwargs)) self.get_model = _AsyncioEndpoint( settings={ @@ -498,7 +515,7 @@ async def __get_model(self, model_name, x_pinecone_api_version="2025-10", **kwar callable=__get_model, ) - async def __list_models(self, x_pinecone_api_version="2025-10", **kwargs): + async def __list_models(self, x_pinecone_api_version="2025-10", **kwargs) -> ModelInfoList: """List available models # noqa: E501 List the embedding and reranking models hosted by Pinecone. You can use hosted models as an integrated part of Pinecone operations or for standalone embedding and reranking. 
For more details, see [Vector embedding](https://docs.pinecone.io/guides/index-data/indexing-overview#vector-embedding) and [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 @@ -531,7 +548,7 @@ async def __list_models(self, x_pinecone_api_version="2025-10", **kwargs): """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(ModelInfoList, await self.call_with_http_info(**kwargs)) self.list_models = _AsyncioEndpoint( settings={ @@ -574,7 +591,7 @@ async def __list_models(self, x_pinecone_api_version="2025-10", **kwargs): callable=__list_models, ) - async def __rerank(self, x_pinecone_api_version="2025-10", **kwargs): + async def __rerank(self, x_pinecone_api_version="2025-10", **kwargs) -> RerankResult: """Rerank results # noqa: E501 Rerank results according to their relevance to a query. For guidance and examples, see [Rerank results](https://docs.pinecone.io/guides/search/rerank-results). # noqa: E501 @@ -606,7 +623,7 @@ async def __rerank(self, x_pinecone_api_version="2025-10", **kwargs): """ self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version - return await self.call_with_http_info(**kwargs) + return cast(RerankResult, await self.call_with_http_info(**kwargs)) self.rerank = _AsyncioEndpoint( settings={ diff --git a/pinecone/core/openapi/inference/model/dense_embedding.py b/pinecone/core/openapi/inference/model/dense_embedding.py index 37452cd28..fc6149be9 100644 --- a/pinecone/core/openapi/inference/model/dense_embedding.py +++ b/pinecone/core/openapi/inference/model/dense_embedding.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of DenseEmbedding. + + This method is overridden to provide proper type inference for mypy. 
+ The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], values, vector_type, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/document.py b/pinecone/core/openapi/inference/model/document.py index 6151a77a1..7aaa39df7 100644 --- a/pinecone/core/openapi/inference/model/document.py +++ b/pinecone/core/openapi/inference/model/document.py @@ -95,6 +95,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of Document. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/embed_request.py b/pinecone/core/openapi/inference/model/embed_request.py index 5aee7b1b4..aa74684c9 100644 --- a/pinecone/core/openapi/inference/model/embed_request.py +++ b/pinecone/core/openapi/inference/model/embed_request.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.model.embed_request_inputs import EmbedRequestInputs + def lazy_import(): from pinecone.core.openapi.inference.model.embed_request_inputs import EmbedRequestInputs @@ -94,7 +99,7 @@ def openapi_types(cls): return { "model": (str,), # noqa: E501 "inputs": ([EmbedRequestInputs],), # noqa: E501 - "parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "parameters": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -111,6 +116,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of EmbedRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # noqa: E501 @@ -151,7 +167,7 @@ def _from_openapi_data(cls: Type[T], model, inputs, *args, **kwargs) -> T: # no Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) for available model parameters. [optional] # noqa: E501 + parameters (Dict[str, Any]): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) for available model parameters. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -247,7 +263,7 @@ def __init__(self, model, inputs, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) for available model parameters. [optional] # noqa: E501 + parameters (Dict[str, Any]): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/index-data/create-an-index#embedding-models) for available model parameters. 
[optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/inference/model/embed_request_inputs.py b/pinecone/core/openapi/inference/model/embed_request_inputs.py index 6deaa4906..6833bef78 100644 --- a/pinecone/core/openapi/inference/model/embed_request_inputs.py +++ b/pinecone/core/openapi/inference/model/embed_request_inputs.py @@ -99,6 +99,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of EmbedRequestInputs. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/embedding.py b/pinecone/core/openapi/inference/model/embedding.py index d6cf5556a..5e6ee1be5 100644 --- a/pinecone/core/openapi/inference/model/embedding.py +++ b/pinecone/core/openapi/inference/model/embedding.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.model.dense_embedding import DenseEmbedding + from pinecone.core.openapi.inference.model.sparse_embedding import SparseEmbedding + def lazy_import(): from pinecone.core.openapi.inference.model.dense_embedding import DenseEmbedding @@ -329,7 +335,7 @@ def __init__(self, *args, **kwargs) -> None: # noqa: E501 ) @cached_property - def _composed_schemas(): # type: ignore + def _composed_schemas(): # we need this here to make our import 
statements work # we must store _composed_schemas in here so the code is only run # when we invoke this method. If we kept this at the class diff --git a/pinecone/core/openapi/inference/model/embeddings_list.py b/pinecone/core/openapi/inference/model/embeddings_list.py index adf9b5e9b..a73535370 100644 --- a/pinecone/core/openapi/inference/model/embeddings_list.py +++ b/pinecone/core/openapi/inference/model/embeddings_list.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.model.embedding import Embedding + from pinecone.core.openapi.inference.model.embeddings_list_usage import EmbeddingsListUsage + def lazy_import(): from pinecone.core.openapi.inference.model.embedding import Embedding @@ -115,6 +121,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of EmbeddingsList. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], model, vector_type, data, usage, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/embeddings_list_usage.py b/pinecone/core/openapi/inference/model/embeddings_list_usage.py index 02cb83c18..dfe86eeab 100644 --- a/pinecone/core/openapi/inference/model/embeddings_list_usage.py +++ b/pinecone/core/openapi/inference/model/embeddings_list_usage.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of EmbeddingsListUsage. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/error_response.py b/pinecone/core/openapi/inference/model/error_response.py index 9556ba16d..9ebabe1ad 100644 --- a/pinecone/core/openapi/inference/model/error_response.py +++ b/pinecone/core/openapi/inference/model/error_response.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.model.error_response_error import ErrorResponseError + def lazy_import(): from pinecone.core.openapi.inference.model.error_response_error import ErrorResponseError @@ -109,6 +114,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ErrorResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], status, error, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/error_response_error.py b/pinecone/core/openapi/inference/model/error_response_error.py index 61c410238..d2eea8d64 100644 --- a/pinecone/core/openapi/inference/model/error_response_error.py +++ b/pinecone/core/openapi/inference/model/error_response_error.py @@ -86,7 +86,7 @@ def openapi_types(cls): return { "code": (str,), # noqa: E501 "message": (str,), # noqa: E501 - "details": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "details": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -103,6 +103,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ErrorResponseError. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # noqa: E501 @@ -143,7 +154,7 @@ def _from_openapi_data(cls: Type[T], code, message, *args, **kwargs) -> T: # no Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. 
[optional] # noqa: E501 + details (Dict[str, Any]): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -239,7 +250,7 @@ def __init__(self, code, message, *args, **kwargs) -> None: # noqa: E501 Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) - details ({str: (bool, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 + details (Dict[str, Any]): Additional information about the error. This field is not guaranteed to be present. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/inference/model/model_info.py b/pinecone/core/openapi/inference/model/model_info.py index a1e10b4ca..387422d6c 100644 --- a/pinecone/core/openapi/inference/model/model_info.py +++ b/pinecone/core/openapi/inference/model/model_info.py @@ -26,6 +26,16 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.model.model_info_supported_metrics import ( + ModelInfoSupportedMetrics, + ) + from pinecone.core.openapi.inference.model.model_info_supported_parameter import ( + ModelInfoSupportedParameter, + ) + def lazy_import(): from pinecone.core.openapi.inference.model.model_info_supported_metrics import ( @@ -139,6 +149,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ModelInfo. + + This method is overridden to provide proper type inference for mypy. 
+ The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data( diff --git a/pinecone/core/openapi/inference/model/model_info_list.py b/pinecone/core/openapi/inference/model/model_info_list.py index 753524ed6..8452cf108 100644 --- a/pinecone/core/openapi/inference/model/model_info_list.py +++ b/pinecone/core/openapi/inference/model/model_info_list.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.model.model_info import ModelInfo + def lazy_import(): from pinecone.core.openapi.inference.model.model_info import ModelInfo @@ -107,6 +112,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ModelInfoList. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/model_info_supported_parameter.py b/pinecone/core/openapi/inference/model/model_info_supported_parameter.py index 542d50bab..df9add2b7 100644 --- a/pinecone/core/openapi/inference/model/model_info_supported_parameter.py +++ b/pinecone/core/openapi/inference/model/model_info_supported_parameter.py @@ -113,6 +113,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ModelInfoSupportedParameter. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data( diff --git a/pinecone/core/openapi/inference/model/ranked_document.py b/pinecone/core/openapi/inference/model/ranked_document.py index 2caa6dd8d..e9687b7ce 100644 --- a/pinecone/core/openapi/inference/model/ranked_document.py +++ b/pinecone/core/openapi/inference/model/ranked_document.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.model.document import Document + def lazy_import(): from pinecone.core.openapi.inference.model.document import Document @@ -111,6 +116,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of RankedDocument. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], index, score, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/rerank_request.py b/pinecone/core/openapi/inference/model/rerank_request.py index 2f777938d..60e7856a9 100644 --- a/pinecone/core/openapi/inference/model/rerank_request.py +++ b/pinecone/core/openapi/inference/model/rerank_request.py @@ -26,6 +26,11 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.model.document import Document + def lazy_import(): from pinecone.core.openapi.inference.model.document import Document @@ -98,7 +103,7 @@ def openapi_types(cls): "top_n": (int,), # noqa: E501 "return_documents": (bool,), # noqa: E501 "rank_fields": ([str],), # noqa: E501 - "parameters": ({str: (bool, dict, float, int, list, str, none_type)},), # noqa: E501 + "parameters": (Dict[str, Any],), # noqa: E501 } @cached_class_property @@ -119,6 +124,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of RerankRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) -> T: # noqa: E501 @@ -163,7 +179,7 @@ def _from_openapi_data(cls: Type[T], model, query, documents, *args, **kwargs) - top_n (int): The number of results to return sorted by relevance. Defaults to the number of inputs. [optional] # noqa: E501 return_documents (bool): Whether to return the documents in the response. [optional] if omitted the server will use the default value of True. # noqa: E501 rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/search/rerank-results#reranking-models). [optional] if omitted the server will use the default value of ["text"]. # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 + parameters (Dict[str, Any]): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", False) @@ -264,7 +280,7 @@ def __init__(self, model, query, documents, *args, **kwargs) -> None: # noqa: E top_n (int): The number of results to return sorted by relevance. Defaults to the number of inputs. [optional] # noqa: E501 return_documents (bool): Whether to return the documents in the response. [optional] if omitted the server will use the default value of True. 
# noqa: E501 rank_fields ([str]): The field(s) to consider for reranking. If not provided, the default is `[\"text\"]`. The number of fields supported is [model-specific](https://docs.pinecone.io/guides/search/rerank-results#reranking-models). [optional] if omitted the server will use the default value of ["text"]. # noqa: E501 - parameters ({str: (bool, dict, float, int, list, str, none_type)}): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 + parameters (Dict[str, Any]): Additional model-specific parameters. Refer to the [model guide](https://docs.pinecone.io/guides/search/rerank-results#reranking-models) for available model parameters. [optional] # noqa: E501 """ _enforce_allowed_values = kwargs.pop("_enforce_allowed_values", True) diff --git a/pinecone/core/openapi/inference/model/rerank_result.py b/pinecone/core/openapi/inference/model/rerank_result.py index 458d27f7b..5f90a2f0a 100644 --- a/pinecone/core/openapi/inference/model/rerank_result.py +++ b/pinecone/core/openapi/inference/model/rerank_result.py @@ -26,6 +26,12 @@ ) from pinecone.openapi_support.exceptions import PineconeApiAttributeError +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pinecone.core.openapi.inference.model.ranked_document import RankedDocument + from pinecone.core.openapi.inference.model.rerank_result_usage import RerankResultUsage + def lazy_import(): from pinecone.core.openapi.inference.model.ranked_document import RankedDocument @@ -113,6 +119,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of RerankResult. + + This method is overridden to provide proper type inference for mypy. 
+ The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], model, data, usage, *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/rerank_result_usage.py b/pinecone/core/openapi/inference/model/rerank_result_usage.py index a92a2ab71..f50b4008a 100644 --- a/pinecone/core/openapi/inference/model/rerank_result_usage.py +++ b/pinecone/core/openapi/inference/model/rerank_result_usage.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of RerankResultUsage. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/inference/model/sparse_embedding.py b/pinecone/core/openapi/inference/model/sparse_embedding.py index 56aaddec2..171201737 100644 --- a/pinecone/core/openapi/inference/model/sparse_embedding.py +++ b/pinecone/core/openapi/inference/model/sparse_embedding.py @@ -105,6 +105,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of SparseEmbedding. 
+ + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data( diff --git a/pinecone/core/openapi/oauth/api/o_auth_api.py b/pinecone/core/openapi/oauth/api/o_auth_api.py index 818e7c2c4..018ebde5f 100644 --- a/pinecone/core/openapi/oauth/api/o_auth_api.py +++ b/pinecone/core/openapi/oauth/api/o_auth_api.py @@ -9,6 +9,11 @@ Contact: support@pinecone.io """ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, cast +from multiprocessing.pool import ApplyResult + from pinecone.openapi_support import ApiClient, AsyncioApiClient from pinecone.openapi_support.endpoint_utils import ( ExtraOpenApiKwargsTypedDict, @@ -44,7 +49,7 @@ def __get_token( token_request, x_pinecone_api_version="2025-10", **kwargs: ExtraOpenApiKwargsTypedDict, - ): + ) -> TokenResponse | ApplyResult[TokenResponse]: """Create an access token # noqa: E501 Obtain an access token for a service account using the OAuth2 client credentials flow. An access token is needed to authorize requests to the Pinecone Admin API. The host domain for OAuth endpoints is `login.pinecone.io`. 
# noqa: E501 @@ -84,7 +89,9 @@ def __get_token( kwargs = self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["token_request"] = token_request - return self.call_with_http_info(**kwargs) + return cast( + TokenResponse | ApplyResult[TokenResponse], self.call_with_http_info(**kwargs) + ) self.get_token = _Endpoint( settings={ @@ -133,7 +140,9 @@ def __init__(self, api_client=None) -> None: api_client = AsyncioApiClient() self.api_client = api_client - async def __get_token(self, token_request, x_pinecone_api_version="2025-10", **kwargs): + async def __get_token( + self, token_request, x_pinecone_api_version="2025-10", **kwargs + ) -> TokenResponse: """Create an access token # noqa: E501 Obtain an access token for a service account using the OAuth2 client credentials flow. An access token is needed to authorize requests to the Pinecone Admin API. The host domain for OAuth endpoints is `login.pinecone.io`. # noqa: E501 @@ -166,7 +175,7 @@ async def __get_token(self, token_request, x_pinecone_api_version="2025-10", **k self._process_openapi_kwargs(kwargs) kwargs["x_pinecone_api_version"] = x_pinecone_api_version kwargs["token_request"] = token_request - return await self.call_with_http_info(**kwargs) + return cast(TokenResponse, await self.call_with_http_info(**kwargs)) self.get_token = _AsyncioEndpoint( settings={ diff --git a/pinecone/core/openapi/oauth/model/error_response.py b/pinecone/core/openapi/oauth/model/error_response.py index 626707410..2b572ab8c 100644 --- a/pinecone/core/openapi/oauth/model/error_response.py +++ b/pinecone/core/openapi/oauth/model/error_response.py @@ -101,6 +101,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of ErrorResponse. + + This method is overridden to provide proper type inference for mypy. 
+ The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data(cls: Type[T], *args, **kwargs) -> T: # noqa: E501 diff --git a/pinecone/core/openapi/oauth/model/token_request.py b/pinecone/core/openapi/oauth/model/token_request.py index bcf94e93b..8aa3b0ac7 100644 --- a/pinecone/core/openapi/oauth/model/token_request.py +++ b/pinecone/core/openapi/oauth/model/token_request.py @@ -105,6 +105,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of TokenRequest. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. + """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data( diff --git a/pinecone/core/openapi/oauth/model/token_response.py b/pinecone/core/openapi/oauth/model/token_response.py index d657275de..46e375512 100644 --- a/pinecone/core/openapi/oauth/model/token_response.py +++ b/pinecone/core/openapi/oauth/model/token_response.py @@ -103,6 +103,17 @@ def discriminator(cls): _composed_schemas: Dict[Literal["allOf", "oneOf", "anyOf"], Any] = {} + def __new__(cls: Type[T], *args: Any, **kwargs: Any) -> T: + """Create a new instance of TokenResponse. + + This method is overridden to provide proper type inference for mypy. + The actual instance creation logic (including discriminator handling) + is handled by the parent class's __new__ method. 
+ """ + # Call parent's __new__ with all arguments to preserve discriminator logic + instance: T = super().__new__(cls, *args, **kwargs) + return instance + @classmethod @convert_js_args_to_python_args def _from_openapi_data( diff --git a/pinecone/db_control/db_control.py b/pinecone/db_control/db_control.py index daa81b68e..145cce4b4 100644 --- a/pinecone/db_control/db_control.py +++ b/pinecone/db_control/db_control.py @@ -32,7 +32,7 @@ def __init__( self._pool_threads = pool_threads """ :meta private: """ - self._index_api = setup_openapi_client( + self._index_api: ManageIndexesApi = setup_openapi_client( api_client_klass=ApiClient, api_klass=ManageIndexesApi, config=self.config, diff --git a/pinecone/db_control/db_control_asyncio.py b/pinecone/db_control/db_control_asyncio.py index 005c25f22..cd87c207c 100644 --- a/pinecone/db_control/db_control_asyncio.py +++ b/pinecone/db_control/db_control_asyncio.py @@ -27,7 +27,7 @@ def __init__(self, config: "Config", openapi_config: "OpenApiConfiguration") -> self._openapi_config = openapi_config """ :meta private: """ - self._index_api = setup_async_openapi_client( + self._index_api: AsyncioManageIndexesApi = setup_async_openapi_client( api_client_klass=AsyncioApiClient, api_klass=AsyncioManageIndexesApi, config=self._config, diff --git a/pinecone/db_control/index_host_store.py b/pinecone/db_control/index_host_store.py index eec2e7efe..43e383099 100644 --- a/pinecone/db_control/index_host_store.py +++ b/pinecone/db_control/index_host_store.py @@ -1,4 +1,4 @@ -from typing import Dict +from typing import Dict, Any, Type from pinecone.config import Config from pinecone.core.openapi.db_control.api.manage_indexes_api import ( ManageIndexesApi as IndexOperationsApi, @@ -8,7 +8,7 @@ class SingletonMeta(type): - _instances: Dict[str, str] = {} + _instances: Dict[Type[Any], Any] = {} def __call__(cls, *args, **kwargs): if cls not in cls._instances: diff --git a/pinecone/db_control/models/serverless_spec.py 
b/pinecone/db_control/models/serverless_spec.py index f7adc64d5..e2e8a3e37 100644 --- a/pinecone/db_control/models/serverless_spec.py +++ b/pinecone/db_control/models/serverless_spec.py @@ -108,8 +108,10 @@ def __init__( object.__setattr__(self, "read_capacity", read_capacity) object.__setattr__(self, "schema", schema) - def asdict(self): - result = {"serverless": {"cloud": self.cloud, "region": self.region}} + def asdict(self) -> Dict[str, Any]: + from typing import Dict, Any + + result: Dict[str, Any] = {"serverless": {"cloud": self.cloud, "region": self.region}} if self.read_capacity is not None: result["serverless"]["read_capacity"] = self.read_capacity if self.schema is not None: diff --git a/pinecone/db_control/request_factory.py b/pinecone/db_control/request_factory.py index 32a456482..a7838969a 100644 --- a/pinecone/db_control/request_factory.py +++ b/pinecone/db_control/request_factory.py @@ -75,10 +75,14 @@ class PineconeDBControlRequestFactory: @staticmethod def __parse_tags(tags: Optional[Dict[str, str]]) -> IndexTags: + from typing import cast + if tags is None: - return IndexTags() + result = IndexTags() + return cast(IndexTags, result) else: - return IndexTags(**tags) + result = IndexTags(**tags) + return cast(IndexTags, result) @staticmethod def __parse_deletion_protection(deletion_protection: Union[DeletionProtection, str]) -> str: @@ -99,10 +103,13 @@ def __parse_read_capacity( :param read_capacity: Dict with read capacity configuration or existing ReadCapacity model instance :return: ReadCapacityOnDemandSpec, ReadCapacityDedicatedSpec, or existing model instance """ + from typing import cast + if isinstance(read_capacity, dict): mode = read_capacity.get("mode", "OnDemand") if mode == "OnDemand": - return ReadCapacityOnDemandSpec(mode="OnDemand") + result = ReadCapacityOnDemandSpec(mode="OnDemand") + return cast(ReadCapacityOnDemandSpec, result) elif mode == "Dedicated": dedicated_dict: Dict[str, Any] = read_capacity.get("dedicated", {}) # type: 
ignore # Construct ReadCapacityDedicatedConfig @@ -152,13 +159,19 @@ def __parse_read_capacity( dedicated_config_kwargs["manual"] = ScalingConfigManual(**manual_dict) dedicated_config = ReadCapacityDedicatedConfig(**dedicated_config_kwargs) - return ReadCapacityDedicatedSpec(mode="Dedicated", dedicated=dedicated_config) + result = ReadCapacityDedicatedSpec(mode="Dedicated", dedicated=dedicated_config) + return cast(ReadCapacityDedicatedSpec, result) else: # Fallback: let OpenAPI handle it - return read_capacity # type: ignore + from typing import cast + + return cast( + Union[ReadCapacityOnDemandSpec, ReadCapacityDedicatedSpec, "ReadCapacity"], + read_capacity, + ) else: # Already a ReadCapacity model instance - return read_capacity # type: ignore + return read_capacity @staticmethod def __parse_schema( @@ -221,10 +234,13 @@ def __parse_schema( "or provide field_name: field_config pairs directly." ) - return BackupModelSchema(**schema_kwargs) + from typing import cast + + result = BackupModelSchema(**schema_kwargs) + return cast(BackupModelSchema, result) else: # Already a BackupModelSchema instance - return schema # type: ignore + return schema @staticmethod def __parse_index_spec(spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec]) -> IndexSpec: @@ -336,7 +352,9 @@ def __parse_index_spec(spec: Union[Dict, ServerlessSpec, PodSpec, ByocSpec]) -> else: raise TypeError("spec must be of type dict, ServerlessSpec, PodSpec, or ByocSpec") - return index_spec + from typing import cast + + return cast(IndexSpec, index_spec) @staticmethod def create_index_request( @@ -375,7 +393,10 @@ def create_index_request( ] ) - return CreateIndexRequest(**args) + from typing import cast + + result = CreateIndexRequest(**args) + return cast(CreateIndexRequest, result) @staticmethod def create_index_for_model_request( @@ -454,7 +475,10 @@ def create_index_for_model_request( ] ) - return CreateIndexForModelRequest(**args) + from typing import cast + + result = 
CreateIndexForModelRequest(**args) + return cast(CreateIndexForModelRequest, result) @staticmethod def create_index_from_backup_request( @@ -469,7 +493,10 @@ def create_index_from_backup_request( tags_obj = PineconeDBControlRequestFactory.__parse_tags(tags) - return CreateIndexFromBackupRequest(name=name, deletion_protection=dp, tags=tags_obj) + from typing import cast + + result = CreateIndexFromBackupRequest(name=name, deletion_protection=dp, tags=tags_obj) + return cast(CreateIndexFromBackupRequest, result) @staticmethod def configure_index_request( @@ -544,8 +571,14 @@ def configure_index_request( ] ) - return ConfigureIndexRequest(**args_dict) + from typing import cast + + result = ConfigureIndexRequest(**args_dict) + return cast(ConfigureIndexRequest, result) @staticmethod def create_collection_request(name: str, source: str) -> CreateCollectionRequest: - return CreateCollectionRequest(name=name, source=source) + from typing import cast + + result = CreateCollectionRequest(name=name, source=source) + return cast(CreateCollectionRequest, result) diff --git a/pinecone/db_control/resources/asyncio/backup.py b/pinecone/db_control/resources/asyncio/backup.py index f01f9fcc4..0a54cf45d 100644 --- a/pinecone/db_control/resources/asyncio/backup.py +++ b/pinecone/db_control/resources/asyncio/backup.py @@ -94,4 +94,5 @@ async def delete(self, *, backup_id: str) -> None: :param backup_id: The ID of the backup to delete. 
:type backup_id: str """ - return await self._index_api.delete_backup(backup_id=backup_id) + await self._index_api.delete_backup(backup_id=backup_id) + return None diff --git a/pinecone/db_control/resources/asyncio/collection.py b/pinecone/db_control/resources/asyncio/collection.py index 27916df50..7f7d2d64d 100644 --- a/pinecone/db_control/resources/asyncio/collection.py +++ b/pinecone/db_control/resources/asyncio/collection.py @@ -1,4 +1,5 @@ import logging +from typing import Dict, Any from pinecone.db_control.models import CollectionList @@ -14,7 +15,7 @@ def __init__(self, index_api): self.index_api = index_api @require_kwargs - async def create(self, *, name: str, source: str): + async def create(self, *, name: str, source: str) -> None: req = PineconeDBControlRequestFactory.create_collection_request(name=name, source=source) await self.index_api.create_collection(create_collection_request=req) @@ -24,9 +25,12 @@ async def list(self) -> CollectionList: return CollectionList(response) @require_kwargs - async def delete(self, *, name: str): + async def delete(self, *, name: str) -> None: await self.index_api.delete_collection(name) @require_kwargs - async def describe(self, *, name: str): - return await self.index_api.describe_collection(name).to_dict() + async def describe(self, *, name: str) -> Dict[str, Any]: + from typing import cast + + result = await self.index_api.describe_collection(name) + return cast(Dict[str, Any], result.to_dict()) diff --git a/pinecone/db_control/resources/asyncio/index.py b/pinecone/db_control/resources/asyncio/index.py index 36871cf6d..f7825e02b 100644 --- a/pinecone/db_control/resources/asyncio/index.py +++ b/pinecone/db_control/resources/asyncio/index.py @@ -77,7 +77,9 @@ async def create( resp = await self._index_api.create_index(create_index_request=req) if timeout == -1: - return IndexModel(resp) + from typing import cast + + return IndexModel(cast(Any, resp)) return await self.__poll_describe_index_until_ready(name, 
timeout) @require_kwargs @@ -124,7 +126,9 @@ async def create_for_model( resp = await self._index_api.create_index_for_model(req) if timeout == -1: - return IndexModel(resp) + from typing import cast + + return IndexModel(cast(Any, resp)) return await self.__poll_describe_index_until_ready(name, timeout) @require_kwargs @@ -145,7 +149,9 @@ async def create_from_backup( ) return await self.__poll_describe_index_until_ready(name, timeout) - async def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): + async def __poll_describe_index_until_ready( + self, name: str, timeout: Optional[int] = None + ) -> IndexModel: total_wait_time = 0 while True: description = await self.describe(name=name) @@ -170,7 +176,7 @@ async def __poll_describe_index_until_ready(self, name: str, timeout: Optional[i await asyncio.sleep(5) @require_kwargs - async def delete(self, *, name: str, timeout: Optional[int] = None): + async def delete(self, *, name: str, timeout: Optional[int] = None) -> None: await self._index_api.delete_index(name) if timeout == -1: @@ -228,7 +234,7 @@ async def configure( "ReadCapacityDedicatedSpec", ] ] = None, - ): + ) -> None: description = await self.describe(name=name) req = PineconeDBControlRequestFactory.configure_index_request( diff --git a/pinecone/db_control/resources/sync/backup.py b/pinecone/db_control/resources/sync/backup.py index b5d565fc2..051c183cc 100644 --- a/pinecone/db_control/resources/sync/backup.py +++ b/pinecone/db_control/resources/sync/backup.py @@ -108,4 +108,5 @@ def delete(self, *, backup_id: str) -> None: :param backup_id: The ID of the backup to delete. 
:type backup_id: str """ - return self._index_api.delete_backup(backup_id=backup_id) + self._index_api.delete_backup(backup_id=backup_id) + return None diff --git a/pinecone/db_control/resources/sync/index.py b/pinecone/db_control/resources/sync/index.py index 6a3096ae3..afe11ee45 100644 --- a/pinecone/db_control/resources/sync/index.py +++ b/pinecone/db_control/resources/sync/index.py @@ -93,7 +93,9 @@ def create( resp = self._index_api.create_index(create_index_request=req) if timeout == -1: - return IndexModel(resp) + from typing import cast + + return IndexModel(cast(Any, resp)) return self.__poll_describe_index_until_ready(name, timeout) @require_kwargs @@ -140,7 +142,9 @@ def create_for_model( resp = self._index_api.create_index_for_model(req) if timeout == -1: - return IndexModel(resp) + from typing import cast + + return IndexModel(cast(Any, resp)) return self.__poll_describe_index_until_ready(name, timeout) @require_kwargs @@ -178,7 +182,9 @@ def create_from_backup( return self.describe(name=name) return self.__poll_describe_index_until_ready(name, timeout) - def __poll_describe_index_until_ready(self, name: str, timeout: Optional[int] = None): + def __poll_describe_index_until_ready( + self, name: str, timeout: Optional[int] = None + ) -> IndexModel: total_wait_time = 0 while True: description = self.describe(name=name) diff --git a/pinecone/db_data/dataclasses/fetch_by_metadata_response.py b/pinecone/db_data/dataclasses/fetch_by_metadata_response.py index 9783a4f01..bda7cf2a9 100644 --- a/pinecone/db_data/dataclasses/fetch_by_metadata_response.py +++ b/pinecone/db_data/dataclasses/fetch_by_metadata_response.py @@ -4,6 +4,7 @@ from .vector import Vector from .utils import DictLike from pinecone.utils.response_info import ResponseInfo +from pinecone.core.openapi.db_data.models import Usage @dataclass @@ -15,7 +16,7 @@ class Pagination(DictLike): class FetchByMetadataResponse(DictLike): namespace: str vectors: Dict[str, Vector] - usage: Dict[str, int] + 
usage: Optional[Usage] = None pagination: Optional[Pagination] = None _response_info: ResponseInfo = field( default_factory=lambda: cast(ResponseInfo, {"raw_headers": {}}), repr=True, compare=False diff --git a/pinecone/db_data/dataclasses/fetch_response.py b/pinecone/db_data/dataclasses/fetch_response.py index fef301b00..c6627bff8 100644 --- a/pinecone/db_data/dataclasses/fetch_response.py +++ b/pinecone/db_data/dataclasses/fetch_response.py @@ -1,16 +1,17 @@ from dataclasses import dataclass, field -from typing import Dict, cast +from typing import Dict, Optional, cast from .vector import Vector from .utils import DictLike from pinecone.utils.response_info import ResponseInfo +from pinecone.core.openapi.db_data.models import Usage @dataclass class FetchResponse(DictLike): namespace: str vectors: Dict[str, Vector] - usage: Dict[str, int] + usage: Optional[Usage] = None _response_info: ResponseInfo = field( default_factory=lambda: cast(ResponseInfo, {"raw_headers": {}}), repr=True, compare=False ) diff --git a/pinecone/db_data/dataclasses/search_query.py b/pinecone/db_data/dataclasses/search_query.py index 6ce904f47..16e5dbdb2 100644 --- a/pinecone/db_data/dataclasses/search_query.py +++ b/pinecone/db_data/dataclasses/search_query.py @@ -57,7 +57,7 @@ def __post_init__(self): Converts `vector` to a `SearchQueryVectorTypedDict` instance if an enum is provided. 
""" if isinstance(self.vector, SearchQueryVector): - self.vector = self.vector.as_dict() + self.vector = self.vector.as_dict() # type: ignore[assignment] def as_dict(self) -> Dict[str, Any]: """ diff --git a/pinecone/db_data/dataclasses/utils.py b/pinecone/db_data/dataclasses/utils.py index 62c8ba978..890b31910 100644 --- a/pinecone/db_data/dataclasses/utils.py +++ b/pinecone/db_data/dataclasses/utils.py @@ -1,16 +1,23 @@ +from typing import Any + + class DictLike: - def __getitem__(self, key): - if key in self.__dataclass_fields__: + def __getitem__(self, key: str) -> Any: + if hasattr(self, "__dataclass_fields__") and key in getattr( + self, "__dataclass_fields__", {} + ): return getattr(self, key) raise KeyError(f"{key} is not a valid field") - def __setitem__(self, key, value): - if key in self.__dataclass_fields__: + def __setitem__(self, key: str, value: Any) -> None: + if hasattr(self, "__dataclass_fields__") and key in getattr( + self, "__dataclass_fields__", {} + ): setattr(self, key, value) else: raise KeyError(f"{key} is not a valid field") - def get(self, key, default=None): + def get(self, key: str, default: Any = None) -> Any: """Dict-like get method for compatibility with tests that use .get()""" try: return self[key] diff --git a/pinecone/db_data/filter_builder.py b/pinecone/db_data/filter_builder.py index a26e03f80..b31d190af 100644 --- a/pinecone/db_data/filter_builder.py +++ b/pinecone/db_data/filter_builder.py @@ -387,4 +387,4 @@ def build(self) -> FilterTypedDict: raise ValueError("FilterBuilder must have at least one condition") # Type cast to FilterTypedDict - the actual structure may support # nested $and/$or even though the type system doesn't fully capture it - return self._filter # type: ignore[return-value] + return self._filter diff --git a/pinecone/db_data/index.py b/pinecone/db_data/index.py index f98c6f173..3c823f3f0 100644 --- a/pinecone/db_data/index.py +++ b/pinecone/db_data/index.py @@ -1,8 +1,10 @@ +from __future__ import 
annotations + from pinecone.utils.tqdm import tqdm import warnings import logging import json -from typing import Union, List, Optional, Dict, Any, Literal, Iterator, TYPE_CHECKING +from typing import List, Dict, Any, Literal, Iterator, TYPE_CHECKING from pinecone.config import ConfigBuilder @@ -74,7 +76,7 @@ """ :meta private: """ -def parse_query_response(response: OpenAPIQueryResponse): +def parse_query_response(response: OpenAPIQueryResponse) -> QueryResponse: """:meta private:""" # Convert OpenAPI QueryResponse to dataclass QueryResponse from pinecone.utils.response_info import extract_response_info @@ -98,24 +100,52 @@ def parse_query_response(response: OpenAPIQueryResponse): ) +class UpsertResponseTransformer: + """Transformer for converting ApplyResult[OpenAPIUpsertResponse] to UpsertResponse. + + This wrapper transforms the OpenAPI response to our dataclass when .get() is called, + while delegating other methods to the underlying ApplyResult. + """ + + def __init__(self, apply_result: ApplyResult): + self._apply_result = apply_result + + def get(self, timeout=None): + openapi_response = self._apply_result.get(timeout) + from pinecone.utils.response_info import extract_response_info + + response_info = None + if hasattr(openapi_response, "_response_info"): + response_info = openapi_response._response_info + if response_info is None: + response_info = extract_response_info({}) + return UpsertResponse( + upserted_count=openapi_response.upserted_count, _response_info=response_info + ) + + def __getattr__(self, name): + # Delegate other methods to the underlying ApplyResult + return getattr(self._apply_result, name) + + class Index(PluginAware, IndexInterface): """ A client for interacting with a Pinecone index via REST API. For improved performance, use the Pinecone GRPC index client. 
""" - _bulk_import_resource: Optional["BulkImportResource"] + _bulk_import_resource: "BulkImportResource" | None """ :meta private: """ - _namespace_resource: Optional["NamespaceResource"] + _namespace_resource: "NamespaceResource" | None """ :meta private: """ def __init__( self, api_key: str, host: str, - pool_threads: Optional[int] = None, - additional_headers: Optional[Dict[str, str]] = {}, + pool_threads: int | None = None, + additional_headers: Dict[str, str] | None = {}, openapi_config=None, **kwargs, ): @@ -133,8 +163,9 @@ def __init__( self._pool_threads = pool_threads """ :meta private: """ - if kwargs.get("connection_pool_maxsize", None): - self._openapi_config.connection_pool_maxsize = kwargs.get("connection_pool_maxsize") + connection_pool_maxsize = kwargs.get("connection_pool_maxsize", None) + if connection_pool_maxsize is not None: + self._openapi_config.connection_pool_maxsize = connection_pool_maxsize self._vector_api = setup_openapi_client( api_client_klass=ApiClient, @@ -219,14 +250,14 @@ def close(self): @validate_and_convert_errors def upsert( self, - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], - namespace: Optional[str] = None, - batch_size: Optional[int] = None, + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), + namespace: str | None = None, + batch_size: int | None = None, show_progress: bool = True, **kwargs, - ) -> Union[UpsertResponse, ApplyResult]: + ) -> UpsertResponse | ApplyResult: _check_type = kwargs.pop("_check_type", True) if kwargs.get("async_req", False) and batch_size is not None: @@ -241,33 +272,11 @@ def upsert( # If async_req=True, result is an ApplyResult[OpenAPIUpsertResponse] # We need to wrap it to convert to our dataclass when .get() is called if kwargs.get("async_req", False): - # Create a wrapper that transforms the OpenAPI response to our dataclass - class UpsertResponseTransformer: - def 
__init__(self, apply_result: ApplyResult): - self._apply_result = apply_result - - def get(self, timeout=None): - openapi_response = self._apply_result.get(timeout) - from pinecone.utils.response_info import extract_response_info - - response_info = None - if hasattr(openapi_response, "_response_info"): - response_info = openapi_response._response_info - if response_info is None: - response_info = extract_response_info({}) - return UpsertResponse( - upserted_count=openapi_response.upserted_count, - _response_info=response_info, - ) - - def __getattr__(self, name): - # Delegate other methods to the underlying ApplyResult - return getattr(self._apply_result, name) - # result is ApplyResult when async_req=True return UpsertResponseTransformer(result) # type: ignore[arg-type, return-value] # result is UpsertResponse when async_req=False - return result # type: ignore[return-value] + # _upsert_batch already returns UpsertResponse when async_req=False + return result if not isinstance(batch_size, int) or batch_size <= 0: raise ValueError("batch_size must be a positive integer") @@ -301,13 +310,13 @@ def __getattr__(self, name): def _upsert_batch( self, - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], - namespace: Optional[str], + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), + namespace: str | None, _check_type: bool, **kwargs, - ) -> Union[UpsertResponse, ApplyResult]: + ) -> UpsertResponse | ApplyResult: # Convert OpenAPI UpsertResponse to dataclass UpsertResponse result = self._vector_api.upsert_vectors( IndexRequestFactory.upsert_request(vectors, namespace, _check_type, **kwargs), @@ -319,7 +328,7 @@ def _upsert_batch( if kwargs.get("async_req", False): # Return ApplyResult - it will be unwrapped by the caller # The ApplyResult contains OpenAPIUpsertResponse which will be converted when .get() is called - return result # type: 
ignore[return-value] # ApplyResult is not tracked through OpenAPI layers + return result # type: ignore[no-any-return] # ApplyResult is not tracked through OpenAPI layers from pinecone.utils.response_info import extract_response_info @@ -339,7 +348,7 @@ def _iter_dataframe(df, batch_size): @validate_and_convert_errors def upsert_from_dataframe( - self, df, namespace: Optional[str] = None, batch_size: int = 500, show_progress: bool = True + self, df, namespace: str | None = None, batch_size: int = 500, show_progress: bool = True ) -> UpsertResponse: try: import pandas as pd @@ -361,6 +370,10 @@ def upsert_from_dataframe( upserted_count = 0 last_result = None for res in results: + # upsert_from_dataframe doesn't use async_req, so res is always UpsertResponse + assert isinstance( + res, UpsertResponse + ), "Expected UpsertResponse when not using async_req" upserted_count += res.upserted_count last_result = res @@ -403,45 +416,51 @@ def upsert_records(self, namespace: str, records: List[Dict]) -> UpsertResponse: def search( self, namespace: str, - query: Union[SearchQueryTypedDict, SearchQuery], - rerank: Optional[Union[SearchRerankTypedDict, SearchRerank]] = None, - fields: Optional[List[str]] = ["*"], # Default to returning all fields + query: SearchQueryTypedDict | SearchQuery, + rerank: SearchRerankTypedDict | SearchRerank | None = None, + fields: List[str] | None = ["*"], # Default to returning all fields ) -> SearchRecordsResponse: if namespace is None: raise Exception("Namespace is required when searching records") request = IndexRequestFactory.search_request(query=query, rerank=rerank, fields=fields) - return self._vector_api.search_records_namespace(namespace, request) + from typing import cast + + result = self._vector_api.search_records_namespace(namespace, request) + return cast(SearchRecordsResponse, result) @validate_and_convert_errors def search_records( self, namespace: str, - query: Union[SearchQueryTypedDict, SearchQuery], - rerank: 
Optional[Union[SearchRerankTypedDict, SearchRerank]] = None, - fields: Optional[List[str]] = ["*"], # Default to returning all fields + query: SearchQueryTypedDict | SearchQuery, + rerank: SearchRerankTypedDict | SearchRerank | None = None, + fields: List[str] | None = ["*"], # Default to returning all fields ) -> SearchRecordsResponse: return self.search(namespace, query=query, rerank=rerank, fields=fields) @validate_and_convert_errors def delete( self, - ids: Optional[List[str]] = None, - delete_all: Optional[bool] = None, - namespace: Optional[str] = None, - filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, + ids: List[str] | None = None, + delete_all: bool | None = None, + namespace: str | None = None, + filter: FilterTypedDict | None = None, **kwargs, ) -> Dict[str, Any]: - return self._vector_api.delete_vectors( + from typing import cast + + result = self._vector_api.delete_vectors( IndexRequestFactory.delete_request( ids=ids, delete_all=delete_all, namespace=namespace, filter=filter, **kwargs ), **self._openapi_kwargs(kwargs), ) + return cast(Dict[str, Any], result) @validate_and_convert_errors - def fetch(self, ids: List[str], namespace: Optional[str] = None, **kwargs) -> FetchResponse: + def fetch(self, ids: List[str], namespace: str | None = None, **kwargs) -> FetchResponse: args_dict = parse_non_empty_args([("namespace", namespace)]) result = self._vector_api.fetch_vectors(ids=ids, **args_dict, **kwargs) # Copy response info from OpenAPI response if present @@ -465,9 +484,9 @@ def fetch(self, ids: List[str], namespace: Optional[str] = None, **kwargs) -> Fe def fetch_by_metadata( self, filter: FilterTypedDict, - namespace: Optional[str] = None, - limit: Optional[int] = None, - pagination_token: Optional[str] = None, + namespace: str | None = None, + limit: int | None = None, + pagination_token: str | None = None, **kwargs, ) -> FetchByMetadataResponse: """Fetch vectors by metadata filter. 
@@ -490,7 +509,7 @@ def fetch_by_metadata( ... ) Args: - filter (Dict[str, Union[str, float, int, bool, List, dict]]): + filter (Dict[str, str | float | int | bool | List | dict]): Metadata filter expression to select vectors. See `metadata filtering _` namespace (str): The namespace to fetch vectors from. @@ -537,15 +556,15 @@ def query( self, *args, top_k: int, - vector: Optional[List[float]] = None, - id: Optional[str] = None, - namespace: Optional[str] = None, - filter: Optional[FilterTypedDict] = None, - include_values: Optional[bool] = None, - include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + vector: List[float] | None = None, + id: str | None = None, + namespace: str | None = None, + filter: FilterTypedDict | None = None, + include_values: bool | None = None, + include_metadata: bool | None = None, + sparse_vector: SparseValues | SparseVectorTypedDict | None = None, **kwargs, - ) -> Union[QueryResponse, ApplyResult]: + ) -> QueryResponse | ApplyResult: response = self._query( *args, top_k=top_k, @@ -570,13 +589,13 @@ def _query( self, *args, top_k: int, - vector: Optional[List[float]] = None, - id: Optional[str] = None, - namespace: Optional[str] = None, - filter: Optional[FilterTypedDict] = None, - include_values: Optional[bool] = None, - include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + vector: List[float] | None = None, + id: str | None = None, + namespace: str | None = None, + filter: FilterTypedDict | None = None, + include_values: bool | None = None, + include_metadata: bool | None = None, + sparse_vector: SparseValues | SparseVectorTypedDict | None = None, **kwargs, ) -> OpenAPIQueryResponse: if len(args) > 0: @@ -598,21 +617,23 @@ def _query( sparse_vector=sparse_vector, **kwargs, ) - return self._vector_api.query_vectors(request, **self._openapi_kwargs(kwargs)) + from typing import cast + + result = 
self._vector_api.query_vectors(request, **self._openapi_kwargs(kwargs)) + # When async_req=False, result is QueryResponse, not ApplyResult + return cast(OpenAPIQueryResponse, result) @validate_and_convert_errors def query_namespaces( self, - vector: Optional[List[float]], + vector: List[float] | None, namespaces: List[str], metric: Literal["cosine", "euclidean", "dotproduct"], - top_k: Optional[int] = None, - filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, - include_values: Optional[bool] = None, - include_metadata: Optional[bool] = None, - sparse_vector: Optional[ - Union[SparseValues, Dict[str, Union[List[float], List[int]]]] - ] = None, + top_k: int | None = None, + filter: FilterTypedDict | None = None, + include_values: bool | None = None, + include_metadata: bool | None = None, + sparse_vector: SparseValues | SparseVectorTypedDict | None = None, **kwargs, ) -> QueryNamespacesResults: if namespaces is None or len(namespaces) == 0: @@ -641,7 +662,12 @@ def query_namespaces( for ns in target_namespaces ] - for result in as_completed(async_futures): + from typing import cast + from concurrent.futures import Future + + # async_futures is a list of ApplyResult, but as_completed expects Future + futures: List[Future[Any]] = cast(List[Future[Any]], async_futures) + for result in as_completed(futures): raw_result = result.result() response = json.loads(raw_result.data.decode("utf-8")) aggregator.add_results(response) @@ -652,13 +678,13 @@ def query_namespaces( @validate_and_convert_errors def update( self, - id: Optional[str] = None, - values: Optional[List[float]] = None, - set_metadata: Optional[VectorMetadataTypedDict] = None, - namespace: Optional[str] = None, - sparse_values: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, - filter: Optional[FilterTypedDict] = None, - dry_run: Optional[bool] = None, + id: str | None = None, + values: List[float] | None = None, + set_metadata: VectorMetadataTypedDict | None = None, + 
namespace: str | None = None, + sparse_values: SparseValues | SparseVectorTypedDict | None = None, + filter: FilterTypedDict | None = None, + dry_run: bool | None = None, **kwargs, ) -> UpdateResponse: # Validate that exactly one of id or filter is provided @@ -706,20 +732,24 @@ def update( @validate_and_convert_errors def describe_index_stats( - self, filter: Optional[FilterTypedDict] = None, **kwargs + self, filter: FilterTypedDict | None = None, **kwargs ) -> DescribeIndexStatsResponse: - return self._vector_api.describe_index_stats( + from typing import cast + + result = self._vector_api.describe_index_stats( IndexRequestFactory.describe_index_stats_request(filter, **kwargs), **self._openapi_kwargs(kwargs), ) + # When async_req=False, result is IndexDescription, not ApplyResult + return cast(DescribeIndexStatsResponse, result) @validate_and_convert_errors def list_paginated( self, - prefix: Optional[str] = None, - limit: Optional[int] = None, - pagination_token: Optional[str] = None, - namespace: Optional[str] = None, + prefix: str | None = None, + limit: int | None = None, + pagination_token: str | None = None, + namespace: str | None = None, **kwargs, ) -> ListResponse: args_dict = IndexRequestFactory.list_paginated_args( @@ -729,7 +759,11 @@ def list_paginated( namespace=namespace, **kwargs, ) - return self._vector_api.list_vectors(**args_dict, **kwargs) + from typing import cast + + result = self._vector_api.list_vectors(**args_dict, **kwargs) + # When async_req=False, result is ListResponse, not ApplyResult + return cast(ListResponse, result) @validate_and_convert_errors def list(self, **kwargs): @@ -748,15 +782,13 @@ def list(self, **kwargs): def start_import( self, uri: str, - integration_id: Optional[str] = None, - error_mode: Optional[ - Union["ImportErrorMode", Literal["CONTINUE", "ABORT"], str] - ] = "CONTINUE", + integration_id: str | None = None, + error_mode: ("ImportErrorMode" | Literal["CONTINUE", "ABORT"] | str) | None = "CONTINUE", ) -> 
"StartImportResponse": """ Args: uri (str): The URI of the data to import. The URI must start with the scheme of a supported storage provider. - integration_id (Optional[str], optional): If your bucket requires authentication to access, you need to pass the id of your storage integration using this property. Defaults to None. + integration_id (str | None, optional): If your bucket requires authentication to access, you need to pass the id of your storage integration using this property. Defaults to None. error_mode: Defaults to "CONTINUE". If set to "CONTINUE", the import operation will continue even if some records fail to import. Pass "ABORT" to stop the import operation if any records fail to import. @@ -779,8 +811,8 @@ def start_import( def list_imports(self, **kwargs) -> Iterator["ImportModel"]: """ Args: - limit (Optional[int]): The maximum number of operations to fetch in each network call. If unspecified, the server will use a default value. [optional] - pagination_token (Optional[str]): When there are multiple pages of results, a pagination token is returned in the response. The token can be used + limit (int | None): The maximum number of operations to fetch in each network call. If unspecified, the server will use a default value. [optional] + pagination_token (str | None): When there are multiple pages of results, a pagination token is returned in the response. The token can be used to fetch the next page of results. [optional] Returns: @@ -807,12 +839,12 @@ def list_imports(self, **kwargs) -> Iterator["ImportModel"]: @validate_and_convert_errors def list_imports_paginated( - self, limit: Optional[int] = None, pagination_token: Optional[str] = None, **kwargs + self, limit: int | None = None, pagination_token: str | None = None, **kwargs ) -> "ListImportsResponse": """ Args: - limit (Optional[int]): The maximum number of ids to return. If unspecified, the server will use a default value. 
[optional] - pagination_token (Optional[str]): A token needed to fetch the next page of results. This token is returned + limit (int | None): The maximum number of ids to return. If unspecified, the server will use a default value. [optional] + pagination_token (str | None): A token needed to fetch the next page of results. This token is returned in the response if additional results are available. [optional] Returns: ListImportsResponse object which contains the list of operations as ImportModel objects, pagination information, @@ -872,7 +904,7 @@ def cancel_import(self, id: str): @validate_and_convert_errors @require_kwargs def create_namespace( - self, name: str, schema: Optional[Dict[str, Any]] = None, **kwargs + self, name: str, schema: Dict[str, Any] | None = None, **kwargs ) -> "NamespaceDescription": return self.namespace.create(name=name, schema=schema, **kwargs) @@ -884,19 +916,22 @@ def describe_namespace(self, namespace: str, **kwargs) -> "NamespaceDescription" @validate_and_convert_errors @require_kwargs def delete_namespace(self, namespace: str, **kwargs) -> Dict[str, Any]: - return self.namespace.delete(namespace=namespace, **kwargs) + from typing import cast + + result = self.namespace.delete(namespace=namespace, **kwargs) + return cast(Dict[str, Any], result) @validate_and_convert_errors @require_kwargs def list_namespaces( - self, limit: Optional[int] = None, **kwargs + self, limit: int | None = None, **kwargs ) -> Iterator[ListNamespacesResponse]: return self.namespace.list(limit=limit, **kwargs) @validate_and_convert_errors @require_kwargs def list_namespaces_paginated( - self, limit: Optional[int] = None, pagination_token: Optional[str] = None, **kwargs + self, limit: int | None = None, pagination_token: str | None = None, **kwargs ) -> ListNamespacesResponse: return self.namespace.list_paginated( limit=limit, pagination_token=pagination_token, **kwargs diff --git a/pinecone/db_data/index_asyncio.py b/pinecone/db_data/index_asyncio.py index 
f4046fc2d..4d18e97e4 100644 --- a/pinecone/db_data/index_asyncio.py +++ b/pinecone/db_data/index_asyncio.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pinecone.utils.tqdm import tqdm @@ -7,7 +9,8 @@ from .index_asyncio_interface import IndexAsyncioInterface from .query_results_aggregator import QueryResultsAggregator -from typing import Union, List, Optional, Dict, Any, Literal, AsyncIterator, TYPE_CHECKING +from typing import List, Optional, Dict, Any, Literal, AsyncIterator, TYPE_CHECKING +from typing_extensions import Self from pinecone.config import ConfigBuilder @@ -87,7 +90,7 @@ """ :meta private: """ -def parse_query_response(response: OpenAPIQueryResponse): +def parse_query_response(response: OpenAPIQueryResponse) -> QueryResponse: """:meta private:""" # Convert OpenAPI QueryResponse to dataclass QueryResponse from pinecone.utils.response_info import extract_response_info @@ -178,7 +181,7 @@ def __init__( additional_headers: Optional[Dict[str, str]] = {}, openapi_config=None, **kwargs, - ): + ) -> None: self.config = ConfigBuilder.build( api_key=api_key, host=host, additional_headers=additional_headers, **kwargs ) @@ -186,8 +189,9 @@ def __init__( self._openapi_config = ConfigBuilder.build_openapi_config(self.config, openapi_config) """ :meta private: """ - if kwargs.get("connection_pool_maxsize", None): - self._openapi_config.connection_pool_maxsize = kwargs.get("connection_pool_maxsize") + connection_pool_maxsize = kwargs.get("connection_pool_maxsize", None) + if connection_pool_maxsize is not None: + self._openapi_config.connection_pool_maxsize = connection_pool_maxsize self._vector_api = setup_async_openapi_client( api_client_klass=AsyncioApiClient, @@ -207,13 +211,16 @@ def __init__( self._namespace_resource = None """ :meta private: """ - async def __aenter__(self): + async def __aenter__(self) -> Self: return self - async def __aexit__(self, exc_type, exc_value, traceback): + async def __aexit__( + self, exc_type: Optional[type], 
exc_value: Optional[Exception], traceback: Optional[Any] + ) -> Optional[bool]: await self._api_client.close() + return None - async def close(self): + async def close(self) -> None: """Cleanup resources used by the Pinecone Index client. This method should be called when the client is no longer needed so that @@ -287,9 +294,9 @@ def namespace(self) -> "NamespaceResourceAsyncio": @validate_and_convert_errors async def upsert( self, - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), namespace: Optional[str] = None, batch_size: Optional[int] = None, show_progress: bool = True, @@ -332,9 +339,9 @@ async def upsert( @validate_and_convert_errors async def _upsert_batch( self, - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), namespace: Optional[str], _check_type: bool, **kwargs, @@ -385,7 +392,9 @@ async def delete( [("ids", ids), ("delete_all", delete_all), ("namespace", namespace), ("filter", filter)] ) - return await self._vector_api.delete_vectors( + from typing import cast + + result = await self._vector_api.delete_vectors( DeleteRequest( **args_dict, **{ @@ -397,6 +406,7 @@ async def delete( ), **{k: v for k, v in kwargs.items() if k in _OPENAPI_ENDPOINT_PARAMS}, ) + return cast(Dict[str, Any], result) @validate_and_convert_errors async def fetch( @@ -458,7 +468,7 @@ async def main(): asyncio.run(main()) Args: - filter (Dict[str, Union[str, float, int, bool, List, dict]]): + filter (Dict[str, str | float | int | bool | List | dict]): Metadata filter expression to select vectors. See `metadata filtering _` namespace (str): The namespace to fetch vectors from. 
@@ -513,7 +523,7 @@ async def query( filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> QueryResponse: response = await self._query( @@ -540,7 +550,7 @@ async def _query( filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> OpenAPIQueryResponse: if len(args) > 0: @@ -559,9 +569,12 @@ async def _query( sparse_vector=sparse_vector, **kwargs, ) - return await self._vector_api.query_vectors( + from typing import cast + + result = await self._vector_api.query_vectors( request, **{k: v for k, v in kwargs.items() if k in _OPENAPI_ENDPOINT_PARAMS} ) + return cast(OpenAPIQueryResponse, result) @validate_and_convert_errors async def query_namespaces( @@ -569,13 +582,11 @@ async def query_namespaces( namespaces: List[str], metric: Literal["cosine", "euclidean", "dotproduct"], top_k: Optional[int] = None, - filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, + filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, vector: Optional[List[float]] = None, - sparse_vector: Optional[ - Union[SparseValues, Dict[str, Union[List[float], List[int]]]] - ] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> QueryNamespacesResults: if namespaces is None or len(namespaces) == 0: @@ -593,10 +604,10 @@ async def query_namespaces( top_k=overall_topk, vector=vector, namespace=ns, - filter=filter, # type: ignore[arg-type] + filter=filter, include_values=include_values, 
include_metadata=include_metadata, - sparse_vector=sparse_vector, # type: ignore[arg-type] + sparse_vector=sparse_vector, async_threadpool_executor=True, _preload_content=False, **kwargs, @@ -627,7 +638,7 @@ async def update( values: Optional[List[float]] = None, set_metadata: Optional[VectorMetadataTypedDict] = None, namespace: Optional[str] = None, - sparse_values: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_values: Optional[SparseValues | SparseVectorTypedDict] = None, filter: Optional[FilterTypedDict] = None, dry_run: Optional[bool] = None, **kwargs, @@ -679,10 +690,13 @@ async def update( async def describe_index_stats( self, filter: Optional[FilterTypedDict] = None, **kwargs ) -> DescribeIndexStatsResponse: - return await self._vector_api.describe_index_stats( + from typing import cast + + result = await self._vector_api.describe_index_stats( IndexRequestFactory.describe_index_stats_request(filter, **kwargs), **self._openapi_kwargs(kwargs), ) + return cast(DescribeIndexStatsResponse, result) @validate_and_convert_errors async def list_paginated( @@ -700,10 +714,13 @@ async def list_paginated( namespace=namespace, **kwargs, ) - return await self._vector_api.list_vectors(**args_dict, **kwargs) + from typing import cast + + result = await self._vector_api.list_vectors(**args_dict, **kwargs) + return cast(ListResponse, result) @validate_and_convert_errors - async def list(self, **kwargs): + async def list(self, **kwargs) -> AsyncIterator[List[str]]: done = False while not done: results = await self.list_paginated(**kwargs) @@ -744,8 +761,8 @@ async def upsert_records(self, namespace: str, records: List[Dict]) -> UpsertRes async def search( self, namespace: str, - query: Union[SearchQueryTypedDict, SearchQuery], - rerank: Optional[Union[SearchRerankTypedDict, SearchRerank]] = None, + query: SearchQueryTypedDict | SearchQuery, + rerank: Optional[SearchRerankTypedDict | SearchRerank] = None, fields: Optional[List[str]] = ["*"], # Default to 
returning all fields ) -> SearchRecordsResponse: if namespace is None: @@ -753,13 +770,16 @@ async def search( request = IndexRequestFactory.search_request(query=query, rerank=rerank, fields=fields) - return await self._vector_api.search_records_namespace(namespace, request) + from typing import cast + + result = await self._vector_api.search_records_namespace(namespace, request) + return cast(SearchRecordsResponse, result) async def search_records( self, namespace: str, - query: Union[SearchQueryTypedDict, SearchQuery], - rerank: Optional[Union[SearchRerankTypedDict, SearchRerank]] = None, + query: SearchQueryTypedDict | SearchQuery, + rerank: Optional[SearchRerankTypedDict | SearchRerank] = None, fields: Optional[List[str]] = ["*"], # Default to returning all fields ) -> SearchRecordsResponse: return await self.search(namespace, query=query, rerank=rerank, fields=fields) @@ -891,11 +911,14 @@ async def describe_namespace(self, namespace: str, **kwargs) -> "NamespaceDescri @validate_and_convert_errors @require_kwargs async def delete_namespace(self, namespace: str, **kwargs) -> Dict[str, Any]: - return await self.namespace.delete(namespace=namespace, **kwargs) + from typing import cast + + result = await self.namespace.delete(namespace=namespace, **kwargs) + return cast(Dict[str, Any], result) @validate_and_convert_errors @require_kwargs - async def list_namespaces( + async def list_namespaces( # type: ignore[override, misc] # mypy limitation: async generators in abstract methods self, limit: Optional[int] = None, **kwargs ) -> AsyncIterator[ListNamespacesResponse]: async for namespace in self.namespace.list(limit=limit, **kwargs): diff --git a/pinecone/db_data/index_asyncio_interface.py b/pinecone/db_data/index_asyncio_interface.py index 8996f6a81..a245804f0 100644 --- a/pinecone/db_data/index_asyncio_interface.py +++ b/pinecone/db_data/index_asyncio_interface.py @@ -1,11 +1,11 @@ +from __future__ import annotations + from abc import ABC, abstractmethod -from 
typing import Union, List, Optional, Dict, Any, AsyncIterator +from typing import List, Optional, Dict, Any, AsyncIterator, Literal from pinecone.core.openapi.db_data.models import ( IndexDescription as DescribeIndexStatsResponse, - Vector, ListResponse, - SparseValues, SearchRecordsResponse, NamespaceDescription, ListNamespacesResponse, @@ -22,6 +22,8 @@ SearchRerankTypedDict, ) from .dataclasses import ( + Vector, + SparseValues, SearchQuery, SearchRerank, FetchResponse, @@ -37,9 +39,9 @@ class IndexAsyncioInterface(ABC): @abstractmethod async def upsert( self, - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), namespace: Optional[str] = None, batch_size: Optional[int] = None, show_progress: bool = True, @@ -193,7 +195,7 @@ async def delete( namespace: Optional[str] = None, filter: Optional[FilterTypedDict] = None, **kwargs, - ) -> UpdateResponse: + ) -> Dict[str, Any]: """ Args: ids (List[str]): Vector ids to delete [optional] @@ -352,7 +354,7 @@ async def query( filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> QueryResponse: """ @@ -467,12 +469,13 @@ async def main(): async def query_namespaces( self, namespaces: List[str], + metric: Literal["cosine", "euclidean", "dotproduct"], top_k: Optional[int] = None, filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, vector: Optional[List[float]] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> QueryNamespacesResults: """The 
query_namespaces() method is used to make a query to multiple namespaces in parallel and combine the results into one result set. @@ -529,7 +532,7 @@ async def update( values: Optional[List[float]] = None, set_metadata: Optional[VectorMetadataTypedDict] = None, namespace: Optional[str] = None, - sparse_values: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_values: Optional[SparseValues | SparseVectorTypedDict] = None, filter: Optional[FilterTypedDict] = None, dry_run: Optional[bool] = None, **kwargs, @@ -821,8 +824,8 @@ async def main(): async def search( self, namespace: str, - query: Union[SearchQueryTypedDict, SearchQuery], - rerank: Optional[Union[SearchRerankTypedDict, SearchRerank]] = None, + query: SearchQueryTypedDict | SearchQuery, + rerank: Optional[SearchRerankTypedDict | SearchRerank] = None, fields: Optional[List[str]] = ["*"], # Default to returning all fields ) -> SearchRecordsResponse: """ @@ -917,8 +920,8 @@ async def main(): async def search_records( self, namespace: str, - query: Union[SearchQueryTypedDict, SearchQuery], - rerank: Optional[Union[SearchRerankTypedDict, SearchRerank]] = None, + query: SearchQueryTypedDict | SearchQuery, + rerank: Optional[SearchRerankTypedDict | SearchRerank] = None, fields: Optional[List[str]] = ["*"], # Default to returning all fields ) -> SearchRecordsResponse: """Alias of the search() method.""" diff --git a/pinecone/db_data/interfaces.py b/pinecone/db_data/interfaces.py index 0974694b2..3ac888d46 100644 --- a/pinecone/db_data/interfaces.py +++ b/pinecone/db_data/interfaces.py @@ -1,11 +1,11 @@ +from __future__ import annotations + from abc import ABC, abstractmethod -from typing import Union, List, Optional, Dict, Any, Iterator +from typing import List, Optional, Dict, Any, Iterator, Literal from pinecone.core.openapi.db_data.models import ( IndexDescription as DescribeIndexStatsResponse, - Vector, ListResponse, - SparseValues, SearchRecordsResponse, NamespaceDescription, 
ListNamespacesResponse, @@ -30,6 +30,8 @@ QueryResponse, UpsertResponse, UpdateResponse, + SparseValues, + Vector, ) from pinecone.utils import require_kwargs @@ -38,14 +40,14 @@ class IndexInterface(ABC): @abstractmethod def upsert( self, - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), namespace: Optional[str] = None, batch_size: Optional[int] = None, show_progress: bool = True, **kwargs, - ) -> UpsertResponse: + ) -> UpsertResponse | ApplyResult: """ Args: vectors (Union[List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict]]): A list of vectors to upsert. @@ -350,8 +352,8 @@ def upsert_records(self, namespace: str, records: List[Dict]) -> UpsertResponse: def search( self, namespace: str, - query: Union[SearchQueryTypedDict, SearchQuery], - rerank: Optional[Union[SearchRerankTypedDict, SearchRerank]] = None, + query: SearchQueryTypedDict | SearchQuery, + rerank: Optional[SearchRerankTypedDict | SearchRerank] = None, fields: Optional[List[str]] = ["*"], # Default to returning all fields ) -> SearchRecordsResponse: """ @@ -456,8 +458,8 @@ def search( def search_records( self, namespace: str, - query: Union[SearchQueryTypedDict, SearchQuery], - rerank: Optional[Union[SearchRerankTypedDict, SearchRerank]] = None, + query: SearchQueryTypedDict | SearchQuery, + rerank: Optional[SearchRerankTypedDict | SearchRerank] = None, fields: Optional[List[str]] = ["*"], # Default to returning all fields ) -> SearchRecordsResponse: """Alias of the search() method.""" @@ -471,7 +473,7 @@ def delete( namespace: Optional[str] = None, filter: Optional[FilterTypedDict] = None, **kwargs, - ) -> UpdateResponse: + ) -> Dict[str, Any]: """ Args: ids (List[str]): Vector ids to delete [optional] @@ -589,9 +591,9 @@ def query( filter: Optional[FilterTypedDict] = None, include_values: 
Optional[bool] = None, include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, - ) -> Union[QueryResponse, ApplyResult]: + ) -> QueryResponse | ApplyResult: """ The Query operation searches a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. @@ -638,13 +640,14 @@ def query( @abstractmethod def query_namespaces( self, - vector: List[float], + vector: Optional[List[float]], namespaces: List[str], + metric: Literal["cosine", "euclidean", "dotproduct"], top_k: Optional[int] = None, filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> QueryNamespacesResults: """The ``query_namespaces()`` method is used to make a query to multiple namespaces in parallel and combine the results into one result set. 
@@ -714,7 +717,7 @@ def update( values: Optional[List[float]] = None, set_metadata: Optional[VectorMetadataTypedDict] = None, namespace: Optional[str] = None, - sparse_values: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_values: Optional[SparseValues | SparseVectorTypedDict] = None, filter: Optional[FilterTypedDict] = None, dry_run: Optional[bool] = None, **kwargs, diff --git a/pinecone/db_data/request_factory.py b/pinecone/db_data/request_factory.py index 23125abb5..b8c9ba96d 100644 --- a/pinecone/db_data/request_factory.py +++ b/pinecone/db_data/request_factory.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import logging -from typing import Union, List, Optional, Dict, Any, cast +from typing import List, Dict, Any from pinecone.core.openapi.db_data.models import ( QueryRequest, @@ -15,6 +17,7 @@ VectorValues, SearchRecordsVector, UpsertRecord, + Vector as OpenApiVector, ) from ..utils import parse_non_empty_args, convert_enum_to_string from .vector_factory import VectorFactory @@ -46,13 +49,13 @@ class IndexRequestFactory: @staticmethod def query_request( top_k: int, - vector: Optional[List[float]] = None, - id: Optional[str] = None, - namespace: Optional[str] = None, - filter: Optional[FilterTypedDict] = None, - include_values: Optional[bool] = None, - include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + vector: List[float] | None = None, + id: str | None = None, + namespace: str | None = None, + filter: FilterTypedDict | None = None, + include_values: bool | None = None, + include_metadata: bool | None = None, + sparse_vector: SparseValues | SparseVectorTypedDict | None = None, **kwargs, ) -> QueryRequest: if vector is not None and id is not None: @@ -73,51 +76,58 @@ def query_request( ] ) - return QueryRequest( + result: QueryRequest = QueryRequest( **args_dict, _check_type=kwargs.pop("_check_type", False), **non_openapi_kwargs(kwargs) ) + return result 
@staticmethod def upsert_request( - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], - namespace: Optional[str], + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), + namespace: str | None, _check_type: bool, **kwargs, ) -> UpsertRequest: args_dict = parse_non_empty_args([("namespace", namespace)]) - def vec_builder(v): + def vec_builder( + v: Vector | VectorTuple | VectorTupleWithMetadata | VectorTypedDict, + ) -> OpenApiVector: return VectorFactory.build(v, check_type=_check_type) - return UpsertRequest( + result: UpsertRequest = UpsertRequest( vectors=list(map(vec_builder, vectors)), **args_dict, _check_type=_check_type, **non_openapi_kwargs(kwargs), ) + return result @staticmethod def delete_request( - ids: Optional[List[str]] = None, - delete_all: Optional[bool] = None, - namespace: Optional[str] = None, - filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, + ids: List[str] | None = None, + delete_all: bool | None = None, + namespace: str | None = None, + filter: FilterTypedDict | None = None, **kwargs, ) -> DeleteRequest: _check_type = kwargs.pop("_check_type", False) args_dict = parse_non_empty_args( [("ids", ids), ("delete_all", delete_all), ("namespace", namespace), ("filter", filter)] ) - return DeleteRequest(**args_dict, **non_openapi_kwargs(kwargs), _check_type=_check_type) + result: DeleteRequest = DeleteRequest( + **args_dict, **non_openapi_kwargs(kwargs), _check_type=_check_type + ) + return result @staticmethod def fetch_by_metadata_request( filter: FilterTypedDict, - namespace: Optional[str] = None, - limit: Optional[int] = None, - pagination_token: Optional[str] = None, + namespace: str | None = None, + limit: int | None = None, + pagination_token: str | None = None, **kwargs, ) -> FetchByMetadataRequest: _check_type = kwargs.pop("_check_type", False) @@ -129,19 +139,20 @@ def fetch_by_metadata_request( 
("pagination_token", pagination_token), ] ) - return FetchByMetadataRequest( + result: FetchByMetadataRequest = FetchByMetadataRequest( **args_dict, **non_openapi_kwargs(kwargs), _check_type=_check_type ) + return result @staticmethod def update_request( - id: Optional[str] = None, - values: Optional[List[float]] = None, - set_metadata: Optional[VectorMetadataTypedDict] = None, - namespace: Optional[str] = None, - sparse_values: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, - filter: Optional[FilterTypedDict] = None, - dry_run: Optional[bool] = None, + id: str | None = None, + values: List[float] | None = None, + set_metadata: VectorMetadataTypedDict | None = None, + namespace: str | None = None, + sparse_values: SparseValues | SparseVectorTypedDict | None = None, + filter: FilterTypedDict | None = None, + dry_run: bool | None = None, **kwargs, ) -> UpdateRequest: _check_type = kwargs.pop("_check_type", False) @@ -158,25 +169,29 @@ def update_request( ] ) - return UpdateRequest(**args_dict, _check_type=_check_type, **non_openapi_kwargs(kwargs)) + result: UpdateRequest = UpdateRequest( + **args_dict, _check_type=_check_type, **non_openapi_kwargs(kwargs) + ) + return result @staticmethod def describe_index_stats_request( - filter: Optional[FilterTypedDict] = None, **kwargs + filter: FilterTypedDict | None = None, **kwargs ) -> DescribeIndexStatsRequest: _check_type = kwargs.pop("_check_type", False) args_dict = parse_non_empty_args([("filter", filter)]) - return DescribeIndexStatsRequest( + result: DescribeIndexStatsRequest = DescribeIndexStatsRequest( **args_dict, **non_openapi_kwargs(kwargs), _check_type=_check_type ) + return result @staticmethod def list_paginated_args( - prefix: Optional[str] = None, - limit: Optional[int] = None, - pagination_token: Optional[str] = None, - namespace: Optional[str] = None, + prefix: str | None = None, + limit: int | None = None, + pagination_token: str | None = None, + namespace: str | None = None, **kwargs, ) -> 
Dict[str, Any]: return parse_non_empty_args( @@ -190,9 +205,9 @@ def list_paginated_args( @staticmethod def search_request( - query: Union[SearchQueryTypedDict, SearchQuery], - rerank: Optional[Union[SearchRerankTypedDict, SearchRerank]] = None, - fields: Optional[List[str]] = ["*"], # Default to returning all fields + query: SearchQueryTypedDict | SearchQuery, + rerank: SearchRerankTypedDict | SearchRerank | None = None, + fields: List[str] | None = ["*"], # Default to returning all fields ) -> SearchRecordsRequest: request_args = parse_non_empty_args( [ @@ -202,16 +217,16 @@ def search_request( ] ) - return SearchRecordsRequest(**request_args) + result: SearchRecordsRequest = SearchRecordsRequest(**request_args) + return result @staticmethod - def _parse_search_query( - query: Union[SearchQueryTypedDict, SearchQuery], - ) -> SearchRecordsRequestQuery: + def _parse_search_query(query: SearchQueryTypedDict | SearchQuery) -> SearchRecordsRequestQuery: if isinstance(query, SearchQuery): query_dict = query.as_dict() else: - query_dict = cast(dict[str, Any], query) + # query is SearchQueryTypedDict which is a TypedDict, so it's already a dict + query_dict = query # type: ignore[assignment] required_fields = {"top_k"} for key in required_fields: @@ -234,12 +249,13 @@ def _parse_search_query( srrq.vector = IndexRequestFactory._parse_search_vector(query_dict["vector"]) if match_terms is not None: srrq.match_terms = match_terms - return srrq + result: SearchRecordsRequestQuery = srrq + return result @staticmethod def _parse_search_vector( - vector: Optional[Union[SearchQueryVectorTypedDict, SearchQueryVector]], - ): + vector: SearchQueryVectorTypedDict | SearchQueryVector | None, + ) -> SearchRecordsVector | None: if vector is None: return None @@ -248,30 +264,36 @@ def _parse_search_vector( return None vector_dict = vector.as_dict() else: - vector_dict = cast(dict[str, Any], vector) + # vector is SearchQueryVectorTypedDict which is a TypedDict, so it's already a dict + 
vector_dict = vector # type: ignore[assignment] if ( vector_dict.get("values", None) is None and vector_dict.get("sparse_values", None) is None ): return None + from typing import cast + srv = SearchRecordsVector(**{k: v for k, v in vector_dict.items() if k not in {"values"}}) values = vector_dict.get("values", None) if values is not None: srv.values = VectorValues(value=values) - return srv + return cast(SearchRecordsVector, srv) @staticmethod - def _parse_search_rerank(rerank: Optional[Union[SearchRerankTypedDict, SearchRerank]] = None): + def _parse_search_rerank( + rerank: SearchRerankTypedDict | SearchRerank | None = None, + ) -> SearchRecordsRequestRerank | None: if rerank is None: return None if isinstance(rerank, SearchRerank): rerank_dict = rerank.as_dict() else: - rerank_dict = cast(dict[str, Any], rerank) + # rerank is SearchRerankTypedDict which is a TypedDict, so it's already a dict + rerank_dict = rerank # type: ignore[assignment] required_fields = {"model", "rank_fields"} for key in required_fields: @@ -280,10 +302,11 @@ def _parse_search_rerank(rerank: Optional[Union[SearchRerankTypedDict, SearchRer rerank_dict["model"] = convert_enum_to_string(rerank_dict["model"]) - return SearchRecordsRequestRerank(**rerank_dict) + result: SearchRecordsRequestRerank = SearchRecordsRequestRerank(**rerank_dict) + return result @staticmethod - def upsert_records_args(namespace: str, records: List[Dict]): + def upsert_records_args(namespace: str, records: List[Dict[str, Any]]) -> Dict[str, Any]: if namespace is None: raise ValueError("namespace is required when upserting records") if not records or len(records) == 0: diff --git a/pinecone/db_data/resources/asyncio/bulk_import_asyncio.py b/pinecone/db_data/resources/asyncio/bulk_import_asyncio.py index 6d9cf88f6..41c537ab2 100644 --- a/pinecone/db_data/resources/asyncio/bulk_import_asyncio.py +++ b/pinecone/db_data/resources/asyncio/bulk_import_asyncio.py @@ -50,7 +50,10 @@ async def start( req = 
BulkImportRequestFactory.start_import_request( uri=uri, integration_id=integration_id, error_mode=error_mode ) - return await self.__import_operations_api.start_bulk_import(req) + from typing import cast + + result = await self.__import_operations_api.start_bulk_import(req) + return cast(StartImportResponse, result) async def list(self, **kwargs) -> AsyncIterator["ImportModel"]: """ @@ -117,7 +120,10 @@ async def list_paginated( args_dict = BulkImportRequestFactory.list_imports_paginated_args( limit=limit, pagination_token=pagination_token, **kwargs ) - return await self.__import_operations_api.list_bulk_imports(**args_dict) + from typing import cast + + result = await self.__import_operations_api.list_bulk_imports(**args_dict) + return cast(ListImportsResponse, result) async def describe(self, id: str) -> ImportModel: """ @@ -131,7 +137,10 @@ async def describe(self, id: str) -> ImportModel: `describe_import` is used to get detailed information about a specific import operation. """ args = BulkImportRequestFactory.describe_import_args(id=id) - return await self.__import_operations_api.describe_bulk_import(**args) + from typing import cast + + result = await self.__import_operations_api.describe_bulk_import(**args) + return cast(ImportModel, result) async def cancel(self, id: str): """Cancel an import operation. diff --git a/pinecone/db_data/resources/asyncio/namespace_asyncio.py b/pinecone/db_data/resources/asyncio/namespace_asyncio.py index 13180fd77..0a408faef 100644 --- a/pinecone/db_data/resources/asyncio/namespace_asyncio.py +++ b/pinecone/db_data/resources/asyncio/namespace_asyncio.py @@ -32,8 +32,11 @@ async def create( **Note:** This operation is not supported for pod-based indexes. 
""" + from typing import cast + args = NamespaceRequestFactory.create_namespace_args(name=name, schema=schema, **kwargs) - return await self.__namespace_operations_api.create_namespace(**args) + result = await self.__namespace_operations_api.create_namespace(**args) + return cast(NamespaceDescription, result) @require_kwargs async def describe(self, namespace: str, **kwargs) -> NamespaceDescription: @@ -46,8 +49,11 @@ async def describe(self, namespace: str, **kwargs) -> NamespaceDescription: Describe a namespace within an index, showing the vector count within the namespace. """ + from typing import cast + args = NamespaceRequestFactory.describe_namespace_args(namespace=namespace, **kwargs) - return await self.__namespace_operations_api.describe_namespace(**args) + result = await self.__namespace_operations_api.describe_namespace(**args) + return cast(NamespaceDescription, result) @require_kwargs async def delete(self, namespace: str, **kwargs): @@ -122,7 +128,10 @@ async def list_paginated( eyJza2lwX3Bhc3QiOiI5OTMiLCJwcmVmaXgiOiI5OSJ9 >>> next_results = await index.list_paginated(limit=5, pagination_token=results.pagination.next) """ + from typing import cast + args = NamespaceRequestFactory.list_namespaces_args( limit=limit, pagination_token=pagination_token, **kwargs ) - return await self.__namespace_operations_api.list_namespaces_operation(**args) + result = await self.__namespace_operations_api.list_namespaces_operation(**args) + return cast(ListNamespacesResponse, result) diff --git a/pinecone/db_data/resources/asyncio/record_asyncio.py b/pinecone/db_data/resources/asyncio/record_asyncio.py index 14cd6b28d..1f23f9a14 100644 --- a/pinecone/db_data/resources/asyncio/record_asyncio.py +++ b/pinecone/db_data/resources/asyncio/record_asyncio.py @@ -144,7 +144,10 @@ async def search( request = IndexRequestFactory.search_request(query=query, rerank=rerank, fields=fields) - return await self._vector_api.search_records_namespace(namespace, request) + from typing 
import cast + + result = await self._vector_api.search_records_namespace(namespace, request) + return cast(SearchRecordsResponse, result) @validate_and_convert_errors async def search_records( diff --git a/pinecone/db_data/resources/asyncio/vector_asyncio.py b/pinecone/db_data/resources/asyncio/vector_asyncio.py index e4d953314..86a5371db 100644 --- a/pinecone/db_data/resources/asyncio/vector_asyncio.py +++ b/pinecone/db_data/resources/asyncio/vector_asyncio.py @@ -1,8 +1,10 @@ +from __future__ import annotations + from pinecone.utils.tqdm import tqdm import logging import asyncio import json -from typing import Union, List, Optional, Dict, Any, Literal, AsyncIterator +from typing import List, Optional, Dict, Any, Literal, AsyncIterator from pinecone.core.openapi.db_data.api.vector_operations_api import AsyncioVectorOperationsApi from pinecone.core.openapi.db_data.models import ( @@ -53,7 +55,7 @@ """ :meta private: """ -def parse_query_response(response: OpenAPIQueryResponse): +def parse_query_response(response: OpenAPIQueryResponse) -> QueryResponse: """:meta private:""" # Convert OpenAPI QueryResponse to dataclass QueryResponse from pinecone.utils.response_info import extract_response_info @@ -95,9 +97,9 @@ def _openapi_kwargs(self, kwargs: Dict[str, Any]) -> Dict[str, Any]: @validate_and_convert_errors async def upsert( self, - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), namespace: Optional[str] = None, batch_size: Optional[int] = None, show_progress: bool = True, @@ -168,9 +170,9 @@ async def upsert( @validate_and_convert_errors async def _upsert_batch( self, - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), namespace: 
Optional[str], _check_type: bool, **kwargs, @@ -259,7 +261,9 @@ async def delete( [("ids", ids), ("delete_all", delete_all), ("namespace", namespace), ("filter", filter)] ) - return await self._vector_api.delete_vectors( + from typing import cast + + result = await self._vector_api.delete_vectors( DeleteRequest( **args_dict, **{ @@ -271,6 +275,7 @@ async def delete( ), **{k: v for k, v in kwargs.items() if k in _OPENAPI_ENDPOINT_PARAMS}, ) + return cast(Dict[str, Any], result) @validate_and_convert_errors async def fetch( @@ -396,7 +401,7 @@ async def query( filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> QueryResponse: """Query the index. @@ -450,6 +455,7 @@ async def query( sparse_vector=sparse_vector, **kwargs, ) + # parse_query_response already returns QueryResponse return parse_query_response(response) async def _query( @@ -462,7 +468,7 @@ async def _query( filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> OpenAPIQueryResponse: if len(args) > 0: @@ -481,9 +487,12 @@ async def _query( sparse_vector=sparse_vector, **kwargs, ) - return await self._vector_api.query_vectors( + from typing import cast + + result = await self._vector_api.query_vectors( request, **{k: v for k, v in kwargs.items() if k in _OPENAPI_ENDPOINT_PARAMS} ) + return cast(OpenAPIQueryResponse, result) @validate_and_convert_errors async def query_namespaces( @@ -491,13 +500,11 @@ async def query_namespaces( namespaces: List[str], metric: Literal["cosine", "euclidean", "dotproduct"], top_k: Optional[int] = None, - filter: 
Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, + filter: Optional[Dict[str, str | float | int | bool | List | dict]] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, vector: Optional[List[float]] = None, - sparse_vector: Optional[ - Union[SparseValues, Dict[str, Union[List[float], List[int]]]] - ] = None, + sparse_vector: Optional[SparseValues | Dict[str, List[float] | List[int]]] = None, **kwargs, ) -> QueryNamespacesResults: """Query across multiple namespaces. @@ -583,7 +590,7 @@ async def update( values: Optional[List[float]] = None, set_metadata: Optional[VectorMetadataTypedDict] = None, namespace: Optional[str] = None, - sparse_values: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_values: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> UpdateResponse: """Update a vector in the index. @@ -657,10 +664,13 @@ async def describe_index_stats( >>> await index.vector.describe_index_stats() >>> await index.vector.describe_index_stats(filter={'key': 'value'}) """ - return await self._vector_api.describe_index_stats( + from typing import cast + + result = await self._vector_api.describe_index_stats( IndexRequestFactory.describe_index_stats_request(filter, **kwargs), **self._openapi_kwargs(kwargs), ) + return cast(DescribeIndexStatsResponse, result) @validate_and_convert_errors async def list_paginated( @@ -705,7 +715,10 @@ async def list_paginated( namespace=namespace, **kwargs, ) - return await self._vector_api.list_vectors(**args_dict, **kwargs) + from typing import cast + + result = await self._vector_api.list_vectors(**args_dict, **kwargs) + return cast(ListResponse, result) @validate_and_convert_errors async def list(self, **kwargs) -> AsyncIterator[List[str]]: diff --git a/pinecone/db_data/resources/sync/bulk_import.py b/pinecone/db_data/resources/sync/bulk_import.py index 35a015d2e..e78b4d68e 100644 --- a/pinecone/db_data/resources/sync/bulk_import.py +++ 
b/pinecone/db_data/resources/sync/bulk_import.py @@ -51,7 +51,10 @@ def start( req = BulkImportRequestFactory.start_import_request( uri=uri, integration_id=integration_id, error_mode=error_mode ) - return self.__import_operations_api.start_bulk_import(req) + from typing import cast + + result = self.__import_operations_api.start_bulk_import(req) + return cast(StartImportResponse, result) def list(self, **kwargs) -> Iterator[ImportModel]: """ @@ -126,7 +129,10 @@ def list_paginated( args_dict = BulkImportRequestFactory.list_imports_paginated_args( limit=limit, pagination_token=pagination_token, **kwargs ) - return self.__import_operations_api.list_bulk_imports(**args_dict) + from typing import cast + + result = self.__import_operations_api.list_bulk_imports(**args_dict) + return cast(ListImportsResponse, result) def describe(self, id: str) -> ImportModel: """ @@ -140,7 +146,10 @@ def describe(self, id: str) -> ImportModel: describe_import is used to get detailed information about a specific import operation. """ args = BulkImportRequestFactory.describe_import_args(id=id) - return self.__import_operations_api.describe_bulk_import(**args) + from typing import cast + + result = self.__import_operations_api.describe_bulk_import(**args) + return cast(ImportModel, result) def cancel(self, id: str): """Cancel an import operation. 
diff --git a/pinecone/db_data/resources/sync/bulk_import_request_factory.py b/pinecone/db_data/resources/sync/bulk_import_request_factory.py index fd729efcd..52c088c8f 100644 --- a/pinecone/db_data/resources/sync/bulk_import_request_factory.py +++ b/pinecone/db_data/resources/sync/bulk_import_request_factory.py @@ -1,5 +1,7 @@ +from __future__ import annotations + from enum import Enum -from typing import Optional, TypedDict, Any, Union +from typing import TypedDict, Any from pinecone.core.openapi.db_data.models import ( StartImportRequest, @@ -26,8 +28,8 @@ class BulkImportRequestFactory: @staticmethod def start_import_request( uri: str, - integration_id: Optional[str] = None, - error_mode: Optional[Union[ImportErrorMode, str]] = "CONTINUE", + integration_id: str | None = None, + error_mode: (ImportErrorMode | str) | None = "CONTINUE", ) -> StartImportRequest: if error_mode is None: error_mode = "CONTINUE" @@ -46,11 +48,12 @@ def start_import_request( ] ) - return StartImportRequest(**args_dict) + import_request: StartImportRequest = StartImportRequest(**args_dict) + return import_request @staticmethod def list_imports_paginated_args( - limit: Optional[int] = None, pagination_token: Optional[str] = None, **kwargs + limit: int | None = None, pagination_token: str | None = None, **kwargs ) -> dict[str, Any]: return parse_non_empty_args([("limit", limit), ("pagination_token", pagination_token)]) diff --git a/pinecone/db_data/resources/sync/namespace.py b/pinecone/db_data/resources/sync/namespace.py index 791034e0b..32b098a64 100644 --- a/pinecone/db_data/resources/sync/namespace.py +++ b/pinecone/db_data/resources/sync/namespace.py @@ -40,8 +40,11 @@ def create(self, name: str, schema: Optional[Any] = None, **kwargs) -> Namespace **Note:** This operation is not supported for pod-based indexes. 
""" + from typing import cast + args = NamespaceRequestFactory.create_namespace_args(name=name, schema=schema, **kwargs) - return self.__namespace_operations_api.create_namespace(**args) + result = self.__namespace_operations_api.create_namespace(**args) + return cast(NamespaceDescription, result) @require_kwargs def describe(self, namespace: str, **kwargs) -> NamespaceDescription: @@ -54,8 +57,11 @@ def describe(self, namespace: str, **kwargs) -> NamespaceDescription: Describe a namespace within an index, showing the vector count within the namespace. """ + from typing import cast + args = NamespaceRequestFactory.describe_namespace_args(namespace=namespace, **kwargs) - return self.__namespace_operations_api.describe_namespace(**args) + result = self.__namespace_operations_api.describe_namespace(**args) + return cast(NamespaceDescription, result) @require_kwargs def delete(self, namespace: str, **kwargs): @@ -128,7 +134,10 @@ def list_paginated( eyJza2lwX3Bhc3QiOiI5OTMiLCJwcmVmaXgiOiI5OSJ9 >>> next_results = index.list_paginated(limit=5, pagination_token=results.pagination.next) """ + from typing import cast + args = NamespaceRequestFactory.list_namespaces_args( limit=limit, pagination_token=pagination_token, **kwargs ) - return self.__namespace_operations_api.list_namespaces_operation(**args) + result = self.__namespace_operations_api.list_namespaces_operation(**args) + return cast(ListNamespacesResponse, result) diff --git a/pinecone/db_data/resources/sync/namespace_request_factory.py b/pinecone/db_data/resources/sync/namespace_request_factory.py index 468dd8a7a..7bc313b99 100644 --- a/pinecone/db_data/resources/sync/namespace_request_factory.py +++ b/pinecone/db_data/resources/sync/namespace_request_factory.py @@ -52,7 +52,7 @@ def create_namespace_args( request_kwargs["schema"] = schema_obj else: # schema is already CreateNamespaceRequestSchema - request_kwargs["schema"] = cast(CreateNamespaceRequestSchema, schema) + request_kwargs["schema"] = schema 
create_namespace_request = CreateNamespaceRequest(**request_kwargs) base_args = {"create_namespace_request": create_namespace_request} diff --git a/pinecone/db_data/resources/sync/record.py b/pinecone/db_data/resources/sync/record.py index 447071b94..b9683e48c 100644 --- a/pinecone/db_data/resources/sync/record.py +++ b/pinecone/db_data/resources/sync/record.py @@ -142,7 +142,10 @@ def search( request = IndexRequestFactory.search_request(query=query, rerank=rerank, fields=fields) - return self._vector_api.search_records_namespace(namespace, request) + from typing import cast + + result = self._vector_api.search_records_namespace(namespace, request) + return cast(SearchRecordsResponse, result) @validate_and_convert_errors def search_records( diff --git a/pinecone/db_data/resources/sync/vector.py b/pinecone/db_data/resources/sync/vector.py index 1162eff41..1d55b6a09 100644 --- a/pinecone/db_data/resources/sync/vector.py +++ b/pinecone/db_data/resources/sync/vector.py @@ -1,7 +1,9 @@ +from __future__ import annotations + from pinecone.utils.tqdm import tqdm import logging import json -from typing import Union, List, Optional, Dict, Any, Literal +from typing import List, Optional, Dict, Any, Literal from multiprocessing.pool import ApplyResult from concurrent.futures import as_completed @@ -43,7 +45,7 @@ """ :meta private: """ -def parse_query_response(response: OpenAPIQueryResponse): +def parse_query_response(response: OpenAPIQueryResponse) -> QueryResponse: """:meta private:""" # Convert OpenAPI QueryResponse to dataclass QueryResponse from pinecone.utils.response_info import extract_response_info @@ -67,6 +69,34 @@ def parse_query_response(response: OpenAPIQueryResponse): ) +class UpsertResponseTransformer: + """Transformer for converting ApplyResult[OpenAPIUpsertResponse] to UpsertResponse. + + This wrapper transforms the OpenAPI response to our dataclass when .get() is called, + while delegating other methods to the underlying ApplyResult. 
+ """ + + def __init__(self, apply_result: ApplyResult): + self._apply_result = apply_result + + def get(self, timeout=None): + openapi_response = self._apply_result.get(timeout) + from pinecone.utils.response_info import extract_response_info + + response_info = None + if hasattr(openapi_response, "_response_info"): + response_info = openapi_response._response_info + if response_info is None: + response_info = extract_response_info({}) + return UpsertResponse( + upserted_count=openapi_response.upserted_count, _response_info=response_info + ) + + def __getattr__(self, name): + # Delegate other methods to the underlying ApplyResult + return getattr(self._apply_result, name) + + class VectorResource(PluginAware): """Resource for vector operations on a Pinecone index.""" @@ -87,14 +117,14 @@ def _openapi_kwargs(self, kwargs: Dict[str, Any]) -> Dict[str, Any]: @validate_and_convert_errors def upsert( self, - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), namespace: Optional[str] = None, batch_size: Optional[int] = None, show_progress: bool = True, **kwargs, - ) -> Union[UpsertResponse, ApplyResult]: + ) -> UpsertResponse | ApplyResult: """Upsert vectors into the index. The upsert operation writes vectors into a namespace. 
If a new value is upserted @@ -138,33 +168,13 @@ def upsert( # If async_req=True, result is an ApplyResult[OpenAPIUpsertResponse] # We need to wrap it to convert to our dataclass when .get() is called if kwargs.get("async_req", False): - # Create a wrapper that transforms the OpenAPI response to our dataclass - class UpsertResponseTransformer: - def __init__(self, apply_result: ApplyResult): - self._apply_result = apply_result - - def get(self, timeout=None): - openapi_response = self._apply_result.get(timeout) - from pinecone.utils.response_info import extract_response_info - - response_info = None - if hasattr(openapi_response, "_response_info"): - response_info = openapi_response._response_info - if response_info is None: - response_info = extract_response_info({}) - return UpsertResponse( - upserted_count=openapi_response.upserted_count, - _response_info=response_info, - ) - - def __getattr__(self, name): - # Delegate other methods to the underlying ApplyResult - return getattr(self._apply_result, name) - # result is ApplyResult when async_req=True - return UpsertResponseTransformer(result) # type: ignore[arg-type, return-value] + from typing import cast + + return cast(UpsertResponse, UpsertResponseTransformer(result)) # type: ignore[arg-type] # result is UpsertResponse when async_req=False - return result # type: ignore[return-value] + # _upsert_batch already returns UpsertResponse when async_req=False + return result if not isinstance(batch_size, int) or batch_size <= 0: raise ValueError("batch_size must be a positive integer") @@ -198,13 +208,13 @@ def __getattr__(self, name): def _upsert_batch( self, - vectors: Union[ - List[Vector], List[VectorTuple], List[VectorTupleWithMetadata], List[VectorTypedDict] - ], + vectors: ( + List[Vector] | List[VectorTuple] | List[VectorTupleWithMetadata] | List[VectorTypedDict] + ), namespace: Optional[str], _check_type: bool, **kwargs, - ) -> Union[UpsertResponse, ApplyResult]: + ) -> UpsertResponse | ApplyResult: # 
Convert OpenAPI UpsertResponse to dataclass UpsertResponse result = self._vector_api.upsert_vectors( IndexRequestFactory.upsert_request(vectors, namespace, _check_type, **kwargs), @@ -216,7 +226,9 @@ def _upsert_batch( if kwargs.get("async_req", False): # Return ApplyResult - it will be unwrapped by the caller # The ApplyResult contains OpenAPIUpsertResponse which will be converted when .get() is called - return result # type: ignore[return-value] # ApplyResult is not tracked through OpenAPI layers + from typing import cast + + return cast(UpsertResponse, result) # ApplyResult is not tracked through OpenAPI layers from pinecone.utils.response_info import extract_response_info @@ -274,6 +286,11 @@ def upsert_from_dataframe( upserted_count = 0 last_result = None for res in results: + # res is always UpsertResponse when not using async_req + # upsert() doesn't use async_req, so res is always UpsertResponse + assert isinstance( + res, UpsertResponse + ), "Expected UpsertResponse when not using async_req" upserted_count += res.upserted_count last_result = res @@ -294,7 +311,7 @@ def delete( ids: Optional[List[str]] = None, delete_all: Optional[bool] = None, namespace: Optional[str] = None, - filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, + filter: Optional[FilterTypedDict] = None, **kwargs, ) -> Dict[str, Any]: """Delete vectors from the index. 
@@ -320,12 +337,15 @@ def delete( >>> index.vector.delete(delete_all=True, namespace='my_namespace') >>> index.vector.delete(filter={'key': 'value'}, namespace='my_namespace') """ - return self._vector_api.delete_vectors( + from typing import cast + + result = self._vector_api.delete_vectors( IndexRequestFactory.delete_request( ids=ids, delete_all=delete_all, namespace=namespace, filter=filter, **kwargs ), **self._openapi_kwargs(kwargs), ) + return cast(Dict[str, Any], result) @validate_and_convert_errors def fetch(self, ids: List[str], namespace: Optional[str] = None, **kwargs) -> FetchResponse: @@ -447,9 +467,9 @@ def query( filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, - ) -> Union[QueryResponse, ApplyResult]: + ) -> QueryResponse | ApplyResult: """Query the index. The Query operation searches a namespace, using a query vector. 
It retrieves the @@ -507,6 +527,7 @@ def query( # The response is already an ApplyResult[OpenAPIQueryResponse] return response # type: ignore[return-value] # ApplyResult is not tracked through OpenAPI layers else: + # parse_query_response already returns QueryResponse return parse_query_response(response) def _query( @@ -519,7 +540,7 @@ def _query( filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, - sparse_vector: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> OpenAPIQueryResponse: if len(args) > 0: @@ -541,7 +562,10 @@ def _query( sparse_vector=sparse_vector, **kwargs, ) - return self._vector_api.query_vectors(request, **self._openapi_kwargs(kwargs)) + from typing import cast + + result = self._vector_api.query_vectors(request, **self._openapi_kwargs(kwargs)) + return cast(OpenAPIQueryResponse, result) @validate_and_convert_errors def query_namespaces( @@ -550,12 +574,10 @@ def query_namespaces( namespaces: List[str], metric: Literal["cosine", "euclidean", "dotproduct"], top_k: Optional[int] = None, - filter: Optional[Dict[str, Union[str, float, int, bool, List, dict]]] = None, + filter: Optional[FilterTypedDict] = None, include_values: Optional[bool] = None, include_metadata: Optional[bool] = None, - sparse_vector: Optional[ - Union[SparseValues, Dict[str, Union[List[float], List[int]]]] - ] = None, + sparse_vector: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> QueryNamespacesResults: """Query across multiple namespaces. 
@@ -618,7 +640,14 @@ def query_namespaces( for ns in target_namespaces ] - for result in as_completed(async_futures): + from typing import cast + from concurrent.futures import Future + + # async_futures is List[QueryResponse | ApplyResult] + # When async_threadpool_executor=True, query returns ApplyResult + # as_completed expects Iterable[Future], so we need to cast + futures: List[Future[Any]] = cast(List[Future[Any]], async_futures) + for result in as_completed(futures): raw_result = result.result() response = json.loads(raw_result.data.decode("utf-8")) aggregator.add_results(response) @@ -633,7 +662,7 @@ def update( values: Optional[List[float]] = None, set_metadata: Optional[VectorMetadataTypedDict] = None, namespace: Optional[str] = None, - sparse_values: Optional[Union[SparseValues, SparseVectorTypedDict]] = None, + sparse_values: Optional[SparseValues | SparseVectorTypedDict] = None, **kwargs, ) -> UpdateResponse: """Update a vector in the index. @@ -707,10 +736,13 @@ def describe_index_stats( >>> index.vector.describe_index_stats() >>> index.vector.describe_index_stats(filter={'key': 'value'}) """ - return self._vector_api.describe_index_stats( + from typing import cast + + result = self._vector_api.describe_index_stats( IndexRequestFactory.describe_index_stats_request(filter, **kwargs), **self._openapi_kwargs(kwargs), ) + return cast(DescribeIndexStatsResponse, result) @validate_and_convert_errors def list_paginated( @@ -755,7 +787,10 @@ def list_paginated( namespace=namespace, **kwargs, ) - return self._vector_api.list_vectors(**args_dict, **kwargs) + from typing import cast + + result = self._vector_api.list_vectors(**args_dict, **kwargs) + return cast(ListResponse, result) @validate_and_convert_errors def list(self, **kwargs): diff --git a/pinecone/db_data/sparse_values_factory.py b/pinecone/db_data/sparse_values_factory.py index 5d07136eb..6139a62b6 100644 --- a/pinecone/db_data/sparse_values_factory.py +++ b/pinecone/db_data/sparse_values_factory.py 
@@ -1,5 +1,7 @@ +from __future__ import annotations + from collections.abc import Mapping -from typing import Union, Optional +from typing import Any from ..utils import convert_to_list @@ -19,14 +21,18 @@ class SparseValuesFactory: @staticmethod def build( - input: Optional[Union[SparseValues, OpenApiSparseValues, SparseVectorTypedDict]], - ) -> Optional[OpenApiSparseValues]: + input: (SparseValues | OpenApiSparseValues | SparseVectorTypedDict) | None, + ) -> OpenApiSparseValues | None: if input is None: return input if isinstance(input, OpenApiSparseValues): - return input + result_input: OpenApiSparseValues = input + return result_input if isinstance(input, SparseValues): - return OpenApiSparseValues(indices=input.indices, values=input.values) + result: OpenApiSparseValues = OpenApiSparseValues( + indices=input.indices, values=input.values + ) + return result if not isinstance(input, Mapping): raise SparseValuesDictionaryExpectedError(input) if not {"indices", "values"}.issubset(input): @@ -39,21 +45,22 @@ def build( raise ValueError("Sparse values indices and values must have the same length") try: - return OpenApiSparseValues(indices=indices, values=values) + result_dict: OpenApiSparseValues = OpenApiSparseValues(indices=indices, values=values) + return result_dict except TypeError as e: raise SparseValuesTypeError() from e @staticmethod - def _convert_to_list(input, expected_type): + def _convert_to_list(input: Any, expected_type: type) -> list[Any]: try: converted = convert_to_list(input) except TypeError as e: raise SparseValuesTypeError() from e SparseValuesFactory._validate_list_items_type(converted, expected_type) - return converted + return converted # type: ignore[no-any-return] @staticmethod - def _validate_list_items_type(input, expected_type): + def _validate_list_items_type(input: list[Any], expected_type: type) -> None: if len(input) > 0 and not isinstance(input[0], expected_type): raise SparseValuesTypeError() diff --git 
a/pinecone/db_data/vector_factory.py b/pinecone/db_data/vector_factory.py index 0738617fa..c93f23108 100644 --- a/pinecone/db_data/vector_factory.py +++ b/pinecone/db_data/vector_factory.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import numbers from collections.abc import Iterable, Mapping -from typing import Union, Tuple +from typing import Tuple from ..utils import fix_tuple_length, convert_to_list, parse_non_empty_args from ..utils.constants import REQUIRED_VECTOR_FIELDS, OPTIONAL_VECTOR_FIELDS @@ -21,7 +23,7 @@ MetadataDictionaryExpectedError, ) -from .types import VectorTuple, VectorTypedDict +from .types import VectorTuple, VectorTupleWithMetadata, VectorTypedDict class VectorFactory: @@ -29,10 +31,12 @@ class VectorFactory: @staticmethod def build( - item: Union[OpenApiVector, VectorTuple, VectorTypedDict], check_type: bool = True + item: OpenApiVector | Vector | VectorTuple | VectorTupleWithMetadata | VectorTypedDict, + check_type: bool = True, ) -> OpenApiVector: if isinstance(item, OpenApiVector): - return item + result: OpenApiVector = item + return result elif isinstance(item, Vector): args = parse_non_empty_args( [ @@ -43,7 +47,8 @@ def build( ] ) - return OpenApiVector(**args) + vector_result: OpenApiVector = OpenApiVector(**args) + return vector_result elif isinstance(item, tuple): return VectorFactory._tuple_to_vector(item, check_type) elif isinstance(item, Mapping): @@ -100,7 +105,8 @@ def _dict_to_vector(item, check_type: bool) -> OpenApiVector: raise MetadataDictionaryExpectedError(item) try: - return OpenApiVector(**item, _check_type=check_type) + result: OpenApiVector = OpenApiVector(**item, _check_type=check_type) + return result except TypeError as e: if not isinstance(item["values"], Iterable) or not isinstance( item["values"].__iter__().__next__(), numbers.Real diff --git a/pinecone/grpc/channel_factory.py b/pinecone/grpc/channel_factory.py index 042d21dfe..d65675568 100644 --- a/pinecone/grpc/channel_factory.py +++ 
b/pinecone/grpc/channel_factory.py @@ -95,7 +95,13 @@ def create_channel(self, endpoint): channel = create_channel_fn(endpoint, options=options_tuple) else: channel_creds = self._build_channel_credentials() - create_channel_fn = grpc.aio.secure_channel if self.use_asyncio else grpc.secure_channel - channel = create_channel_fn(endpoint, credentials=channel_creds, options=options_tuple) + if self.use_asyncio: + channel = grpc.aio.secure_channel( + endpoint, credentials=channel_creds, options=options_tuple + ) + else: + channel = grpc.secure_channel( + endpoint, credentials=channel_creds, options=options_tuple + ) return channel diff --git a/pinecone/grpc/future.py index 2aaf59ff9..8aa261e0b 100644 --- a/pinecone/grpc/future.py +++ b/pinecone/grpc/future.py @@ -106,4 +106,4 @@ def _wrap_rpc_exception(self, e): def __del__(self): self._grpc_future.cancel() - self = None  # release the reference to the grpc future + # Note: the removed `self = None` was valid syntax but only rebound the local name, with no effect on the object diff --git a/pinecone/grpc/grpc_runner.py index e62c34a3a..9a1ac35a2 100644 --- a/pinecone/grpc/grpc_runner.py +++ b/pinecone/grpc/grpc_runner.py @@ -44,7 +44,7 @@ def run( """ @wraps(func) - def wrapped(): + def wrapped() -> Tuple[Any, Optional[Dict[str, str]]]: user_provided_metadata = metadata or {} _metadata = self._prepare_metadata(user_provided_metadata) try: @@ -107,7 +107,7 @@ async def run_asyncio( """ @wraps(func) - async def wrapped(): + async def wrapped() -> Tuple[Any, Optional[Dict[str, str]]]: user_provided_metadata = metadata or {} _metadata = self._prepare_metadata(user_provided_metadata) try: diff --git a/pinecone/grpc/index_grpc.py b/pinecone/grpc/index_grpc.py index 1b2be170b..dc9fad50d 100644 --- a/pinecone/grpc/index_grpc.py +++ b/pinecone/grpc/index_grpc.py @@ -1,5 +1,17 @@ import logging -from typing import Optional, Dict, Union, List, Tuple, Any, Iterable, cast, Literal +from typing import ( + Optional, + Dict, + 
Union, + List, + Tuple, + Any, + Iterable, + cast, + Literal, + Iterator, + TYPE_CHECKING, +) from google.protobuf import json_format @@ -24,13 +36,17 @@ from .sparse_values_factory import SparseValuesFactory from pinecone.core.openapi.db_data.models import ( - FetchResponse, - QueryResponse, IndexDescription as DescribeIndexStatsResponse, NamespaceDescription, ListNamespacesResponse, ) -from pinecone.db_data.dataclasses import FetchByMetadataResponse, UpdateResponse, UpsertResponse +from pinecone.db_data.dataclasses import ( + FetchByMetadataResponse, + UpdateResponse, + UpsertResponse, + FetchResponse, + QueryResponse, +) from pinecone.db_control.models.list_response import ListResponse as SimpleListResponse, Pagination from pinecone.core.grpc.protos.db_data_2025_10_pb2 import ( Vector as GRPCVector, @@ -43,7 +59,6 @@ UpdateRequest, ListRequest, DescribeIndexStatsRequest, - DeleteResponse, SparseValues as GRPCSparseValues, DescribeNamespaceRequest, DeleteNamespaceRequest, @@ -57,6 +72,9 @@ from pinecone.db_data.query_results_aggregator import QueryNamespacesResults, QueryResultsAggregator from .base import GRPCIndexBase from .future import PineconeGrpcFuture + +if TYPE_CHECKING: + from typing import Type from ..db_data.types import ( SparseVectorTypedDict, VectorTypedDict, @@ -83,7 +101,7 @@ class GRPCIndex(GRPCIndexBase): """A client for interacting with a Pinecone index via GRPC API.""" @property - def stub_class(self): + def stub_class(self) -> "Type[VectorServiceStub]": """:meta private:""" return VectorServiceStub @@ -217,7 +235,7 @@ def _upsert_batch( def upsert_from_dataframe( self, - df, + df: Any, namespace: str = "", batch_size: int = 500, use_async_requests: bool = True, @@ -246,7 +264,12 @@ def upsert_from_dataframe( pbar = tqdm(total=len(df), disable=not show_progress, desc="sending upsert requests") results = [] for chunk in self._iter_dataframe(df, batch_size=batch_size): - res = self.upsert(vectors=chunk, namespace=namespace, 
async_req=use_async_requests) + # Type cast: dataframe dicts match VectorTypedDict structure + res = self.upsert( + vectors=cast(List[VectorTypedDict], chunk), + namespace=namespace, + async_req=use_async_requests, + ) pbar.update(len(chunk)) results.append(res) @@ -279,7 +302,7 @@ def upsert_from_dataframe( return UpsertResponse(upserted_count=upserted_count, _response_info=response_info) @staticmethod - def _iter_dataframe(df, batch_size): + def _iter_dataframe(df: Any, batch_size: int) -> Iterator[List[Dict[str, Any]]]: for i in range(0, len(df), batch_size): batch = df.iloc[i : i + batch_size].to_dict(orient="records") yield batch @@ -292,7 +315,7 @@ def delete( filter: Optional[FilterTypedDict] = None, async_req: bool = False, **kwargs, - ) -> Union[DeleteResponse, PineconeGrpcFuture]: + ) -> Union[Dict[str, Any], PineconeGrpcFuture]: """ The Delete operation deletes vectors from the index, from a single namespace. No error raised if the vector id does not exist. @@ -540,7 +563,7 @@ def query( ] = None, async_req: Optional[bool] = False, **kwargs, - ) -> Union[QueryResponse, PineconeGrpcFuture]: + ) -> Union["QueryResponse", PineconeGrpcFuture]: """ The Query operation searches a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. @@ -868,7 +891,7 @@ def list_paginated( namespace=response.namespace, vectors=response.vectors, pagination=pagination ) - def list(self, **kwargs): + def list(self, **kwargs) -> Iterator[List[str]]: """ The list operation accepts all of the same arguments as list_paginated, and returns a generator that yields a list of the matching vector ids in each page of results. 
It automatically handles pagination tokens on your diff --git a/pinecone/grpc/resources/vector_grpc.py b/pinecone/grpc/resources/vector_grpc.py index ab14a3aed..86629bc44 100644 --- a/pinecone/grpc/resources/vector_grpc.py +++ b/pinecone/grpc/resources/vector_grpc.py @@ -19,12 +19,14 @@ from ..vector_factory_grpc import VectorFactoryGRPC from ..sparse_values_factory import SparseValuesFactory -from pinecone.core.openapi.db_data.models import ( +from pinecone.core.openapi.db_data.models import IndexDescription as DescribeIndexStatsResponse +from pinecone.db_data.dataclasses import ( + FetchByMetadataResponse, + UpdateResponse, + UpsertResponse, FetchResponse, QueryResponse, - IndexDescription as DescribeIndexStatsResponse, ) -from pinecone.db_data.dataclasses import FetchByMetadataResponse, UpdateResponse, UpsertResponse from pinecone.db_control.models.list_response import ListResponse as SimpleListResponse, Pagination from pinecone.core.grpc.protos.db_data_2025_10_pb2 import ( Vector as GRPCVector, @@ -36,7 +38,6 @@ UpdateRequest, ListRequest, DescribeIndexStatsRequest, - DeleteResponse, SparseValues as GRPCSparseValues, ) from pinecone import Vector, SparseValues @@ -254,7 +255,7 @@ def delete( filter: Optional[FilterTypedDict] = None, async_req: bool = False, **kwargs, - ) -> Union[DeleteResponse, PineconeGrpcFuture]: + ) -> Union[Dict[str, Any], PineconeGrpcFuture]: """Delete vectors from the index. The Delete operation deletes vectors from the index, from a single namespace. @@ -493,7 +494,7 @@ def query( ] = None, async_req: Optional[bool] = False, **kwargs, - ) -> Union[QueryResponse, PineconeGrpcFuture]: + ) -> Union["QueryResponse", PineconeGrpcFuture]: """Query the index. The Query operation searches a namespace, using a query vector. 
It retrieves the diff --git a/pinecone/grpc/retry.py b/pinecone/grpc/retry.py index 556031efb..c0ff42f00 100644 --- a/pinecone/grpc/retry.py +++ b/pinecone/grpc/retry.py @@ -52,11 +52,13 @@ def __init__(self, retry_config: "RetryConfig"): def _is_retryable_error(self, response_or_error): """Determine if a response is a retryable error.""" - return ( - isinstance(response_or_error, grpc.RpcError) - and "_MultiThreadedRendezvous" not in response_or_error.__class__.__name__ - and response_or_error.code() in self.retryable_status - ) + if not isinstance(response_or_error, grpc.RpcError): + return False + if "_MultiThreadedRendezvous" in response_or_error.__class__.__name__: + return False + if self.retryable_status is None: + return False + return response_or_error.code() in self.retryable_status def _intercept_call(self, continuation, client_call_details, request_or_iterator): response = None diff --git a/pinecone/grpc/utils.py b/pinecone/grpc/utils.py index fcb2d70b1..cf072e862 100644 --- a/pinecone/grpc/utils.py +++ b/pinecone/grpc/utils.py @@ -1,4 +1,4 @@ -from typing import Optional, Union, Dict +from typing import Optional, Union, Dict, Any from google.protobuf import json_format from google.protobuf.message import Message @@ -23,6 +23,7 @@ Pagination, QueryResponse, UpsertResponse, + UpdateResponse, ) from google.protobuf.struct_pb2 import Struct @@ -40,15 +41,20 @@ def dict_to_proto_struct(d: Optional[dict]) -> "Struct": return s -def parse_sparse_values(sparse_values: dict): - return ( +def parse_sparse_values(sparse_values: Optional[dict]) -> SparseValues: + from typing import cast + + result = ( SparseValues(indices=sparse_values["indices"], values=sparse_values["values"]) if sparse_values else SparseValues(indices=[], values=[]) ) + return cast(SparseValues, result) -def parse_fetch_response(response: Message, initial_metadata: Optional[Dict[str, str]] = None): +def parse_fetch_response( + response: Message, initial_metadata: Optional[Dict[str, str]] = None 
+) -> FetchResponse: json_response = json_format.MessageToDict(response) vd = {} @@ -78,18 +84,18 @@ def parse_fetch_response(response: Message, initial_metadata: Optional[Dict[str, metadata = initial_metadata or {} response_info = extract_response_info(metadata) + usage = None + if json_response.get("usage"): + usage = parse_usage(json_response.get("usage", {})) fetch_response = FetchResponse( - vectors=vd, - namespace=namespace, - usage=parse_usage(json_response.get("usage", {})), - _response_info=response_info, + vectors=vd, namespace=namespace, usage=usage, _response_info=response_info ) return fetch_response def parse_fetch_by_metadata_response( response: Message, initial_metadata: Optional[Dict[str, str]] = None -): +) -> FetchByMetadataResponse: json_response = json_format.MessageToDict(response) vd = {} @@ -115,23 +121,29 @@ def parse_fetch_by_metadata_response( metadata = initial_metadata or {} response_info = extract_response_info(metadata) + usage = None + if json_response.get("usage"): + usage = parse_usage(json_response.get("usage", {})) fetch_by_metadata_response = FetchByMetadataResponse( vectors=vd, namespace=namespace, - usage=parse_usage(json_response.get("usage", {})), + usage=usage, pagination=pagination, _response_info=response_info, ) return fetch_by_metadata_response -def parse_usage(usage: dict): - return Usage(read_units=int(usage.get("readUnits", 0))) +def parse_usage(usage: dict) -> Usage: + from typing import cast + + result = Usage(read_units=int(usage.get("readUnits", 0))) + return cast(Usage, result) def parse_upsert_response( response: Message, _check_type: bool = False, initial_metadata: Optional[Dict[str, str]] = None -): +) -> UpsertResponse: from pinecone.utils.response_info import extract_response_info json_response = json_format.MessageToDict(response) @@ -149,8 +161,7 @@ def parse_update_response( response: Union[dict, Message], _check_type: bool = False, initial_metadata: Optional[Dict[str, str]] = None, -): - from 
pinecone.db_data.dataclasses import UpdateResponse +) -> UpdateResponse: from pinecone.utils.response_info import extract_response_info from google.protobuf import json_format @@ -177,14 +188,14 @@ def parse_delete_response( response: Union[dict, Message], _check_type: bool = False, initial_metadata: Optional[Dict[str, str]] = None, -): +) -> Dict[str, Any]: from pinecone.utils.response_info import extract_response_info # Extract response info from initial metadata metadata = initial_metadata or {} response_info = extract_response_info(metadata) - result = {"_response_info": response_info} + result: Dict[str, Any] = {"_response_info": response_info} return result @@ -192,7 +203,7 @@ def parse_query_response( response: Union[dict, Message], _check_type: bool = False, initial_metadata: Optional[Dict[str, str]] = None, -): +) -> QueryResponse: if isinstance(response, Message): json_response = json_format.MessageToDict(response) else: @@ -229,7 +240,7 @@ def parse_query_response( return query_response -def parse_stats_response(response: dict): +def parse_stats_response(response: dict) -> "DescribeIndexStatsResponse": fullness = response.get("indexFullness", 0.0) total_vector_count = response.get("totalVectorCount", 0) # For sparse indexes, dimension is not present, so use None instead of 0 @@ -239,13 +250,16 @@ def parse_stats_response(response: dict): for key in summaries: vc = summaries[key].get("vectorCount", 0) namespace_summaries[key] = NamespaceSummary(vector_count=vc) - return DescribeIndexStatsResponse( + from typing import cast + + result = DescribeIndexStatsResponse( namespaces=namespace_summaries, dimension=dimension, index_fullness=fullness, total_vector_count=total_vector_count, _check_type=False, ) + return cast(DescribeIndexStatsResponse, result) def parse_namespace_description( @@ -276,7 +290,9 @@ def parse_namespace_description( response_info = extract_response_info(metadata) namespace_desc._response_info = response_info - return namespace_desc + from 
typing import cast + + return cast(NamespaceDescription, namespace_desc) def parse_list_namespaces_response(response: Message) -> ListNamespacesResponse: @@ -309,6 +325,9 @@ def parse_list_namespaces_response(response: Message) -> ListNamespacesResponse: ) total_count = json_response.get("totalCount") - return ListNamespacesResponse( + from typing import cast + + result = ListNamespacesResponse( namespaces=namespaces, pagination=pagination, total_count=total_count, _check_type=False ) + return cast(ListNamespacesResponse, result) diff --git a/pinecone/inference/inference_request_builder.py b/pinecone/inference/inference_request_builder.py index 3e10c1fec..24c842697 100644 --- a/pinecone/inference/inference_request_builder.py +++ b/pinecone/inference/inference_request_builder.py @@ -42,10 +42,14 @@ def embed_request( else: raise Exception("Invalid type for variable 'inputs'") + from typing import cast + if parameters: - return EmbedRequest(model=model, inputs=embeddings_inputs, parameters=parameters) + result = EmbedRequest(model=model, inputs=embeddings_inputs, parameters=parameters) + return cast(EmbedRequest, result) else: - return EmbedRequest(model=model, inputs=embeddings_inputs) + result = EmbedRequest(model=model, inputs=embeddings_inputs) + return cast(EmbedRequest, result) @staticmethod def rerank( @@ -84,4 +88,7 @@ def rerank( if parameters is not None: args["parameters"] = parameters - return RerankRequest(**args) + from typing import cast + + result = RerankRequest(**args) + return cast(RerankRequest, result) diff --git a/pinecone/openapi_support/api_client.py b/pinecone/openapi_support/api_client.py index d9a21278b..654687a7f 100644 --- a/pinecone/openapi_support/api_client.py +++ b/pinecone/openapi_support/api_client.py @@ -197,7 +197,7 @@ def __call_api( response=response_data, response_type=response_type, config=self.configuration, - _check_type=_check_type, + _check_type=_check_type if _check_type is not None else True, ) else: return_data = None 
@@ -214,7 +214,8 @@ def __call_api( if isinstance(return_data, dict): return_data["_response_info"] = response_info else: - return_data._response_info = response_info # type: ignore + # Dynamic attribute assignment on OpenAPI models + setattr(return_data, "_response_info", response_info) if _return_http_data_only: return return_data diff --git a/pinecone/openapi_support/api_client_utils.py b/pinecone/openapi_support/api_client_utils.py index b6a736d36..456926a24 100644 --- a/pinecone/openapi_support/api_client_utils.py +++ b/pinecone/openapi_support/api_client_utils.py @@ -109,7 +109,9 @@ def parameters_to_multipart(params, collection_types): :param dict collection_types: Parameter collection types :return: Parameters as list of tuple or urllib3.fields.RequestField """ - new_params = [] + from typing import Union + + new_params: list[Union[RequestField, tuple[Any, Any]]] = [] if collection_types is None: collection_types = dict for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 diff --git a/pinecone/openapi_support/api_version.py b/pinecone/openapi_support/api_version.py index c68138d9b..cc5b21d16 100644 --- a/pinecone/openapi_support/api_version.py +++ b/pinecone/openapi_support/api_version.py @@ -2,4 +2,4 @@ # Do not edit this file manually. 
API_VERSION = "2025-10" -APIS_REPO_SHA = "bbad89bd51d792534a9ba06a44ed1f2259f7f89f" +APIS_REPO_SHA = "d5ac93191def1d9666946d2c0e67edd3140b0f0d" diff --git a/pinecone/openapi_support/asyncio_api_client.py b/pinecone/openapi_support/asyncio_api_client.py index 92050d72c..9ea812ad5 100644 --- a/pinecone/openapi_support/asyncio_api_client.py +++ b/pinecone/openapi_support/asyncio_api_client.py @@ -161,7 +161,10 @@ async def __call_api( if response_type: Deserializer.decode_response(response_type=response_type, response=response_data) return_data = Deserializer.deserialize( - response_data, response_type, self.configuration, _check_type + response_data, + response_type, + self.configuration, + _check_type if _check_type is not None else True, ) else: return_data = None @@ -178,7 +181,8 @@ async def __call_api( if isinstance(return_data, dict): return_data["_response_info"] = response_info else: - return_data._response_info = response_info # type: ignore + # Dynamic attribute assignment on OpenAPI models + setattr(return_data, "_response_info", response_info) if _return_http_data_only: return return_data @@ -192,7 +196,9 @@ def parameters_to_multipart(self, params, collection_types): :param dict collection_types: Parameter collection types :return: Parameters as list of tuple or urllib3.fields.RequestField """ - new_params = [] + from typing import Union + + new_params: list[Union[RequestField, tuple[Any, Any]]] = [] if collection_types is None: collection_types = dict for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 @@ -374,4 +380,10 @@ async def request( def get_file_data_and_close_file(file_instance: io.IOBase) -> bytes: file_data = file_instance.read() file_instance.close() - return file_data + if isinstance(file_data, bytes): + return file_data + # If read() returns str, encode it + if isinstance(file_data, str): + return file_data.encode("utf-8") + # Fallback: convert to bytes + return bytes(file_data) if file_data is not None else 
b"" diff --git a/pinecone/openapi_support/deserializer.py b/pinecone/openapi_support/deserializer.py index 0fee3b941..dcba8ff8e 100644 --- a/pinecone/openapi_support/deserializer.py +++ b/pinecone/openapi_support/deserializer.py @@ -1,8 +1,11 @@ import json import re +from typing import TypeVar, Type, Any, Union, Tuple from .model_utils import deserialize_file, file_type, validate_and_convert_types +T = TypeVar("T") + class Deserializer: @staticmethod @@ -17,7 +20,12 @@ def decode_response(response_type, response): response.data = response.data.decode(encoding) @staticmethod - def deserialize(response, response_type, config, _check_type): + def deserialize( + response: Any, + response_type: Union[Tuple[Type[T], ...], Tuple[Type[Any], ...]], + config: Any, + _check_type: bool, + ) -> Union[T, Any]: """Deserializes response into an object. :param response: RESTResponse object to be deserialized. diff --git a/pinecone/openapi_support/endpoint_utils.py b/pinecone/openapi_support/endpoint_utils.py index 867232b68..0e0d2e7a7 100644 --- a/pinecone/openapi_support/endpoint_utils.py +++ b/pinecone/openapi_support/endpoint_utils.py @@ -158,7 +158,7 @@ def raise_if_invalid_inputs( config: Configuration, params_map: EndpointParamsMapDict, allowed_values: AllowedValuesDict, - validations: PropertyValidationTypedDict, + validations: Dict[Tuple[str], PropertyValidationTypedDict], openapi_types: OpenapiTypesDictType, kwargs: Dict[str, Any], ) -> None: diff --git a/pinecone/openapi_support/model_utils.py b/pinecone/openapi_support/model_utils.py index 54cd9068a..44825f5dc 100644 --- a/pinecone/openapi_support/model_utils.py +++ b/pinecone/openapi_support/model_utils.py @@ -186,12 +186,13 @@ def __new__(cls, *args, **kwargs): return None if issubclass(cls, ModelComposed) and allows_single_value_input(cls): - model_kwargs = {} + model_kwargs: dict = {} oneof_instance = get_oneof_instance(cls, model_kwargs, kwargs, model_arg=arg) return oneof_instance visited_composed_classes = 
kwargs.get("_visited_composed_classes", ()) - if cls.discriminator is None or cls in visited_composed_classes: + discriminator = getattr(cls, "discriminator", None) + if discriminator is None or cls in visited_composed_classes: # Use case 1: this openapi schema (cls) does not have a discriminator # Use case 2: we have already visited this class before and are sure that we # want to instantiate it this time. We have visited this class deserializing @@ -213,8 +214,9 @@ def __new__(cls, *args, **kwargs): # Get the name and value of the discriminator property. # The discriminator name is obtained from the discriminator meta-data # and the discriminator value is obtained from the input data. - discr_propertyname_py = list(cls.discriminator.keys())[0] - discr_propertyname_js = cls.attribute_map[discr_propertyname_py] + discr_propertyname_py = list(discriminator.keys())[0] + attribute_map = getattr(cls, "attribute_map", {}) + discr_propertyname_js = attribute_map[discr_propertyname_py] if discr_propertyname_js in kwargs: discr_value = kwargs[discr_propertyname_js] elif discr_propertyname_py in kwargs: @@ -263,19 +265,20 @@ def __new__(cls, *args, **kwargs): return super(OpenApiModel, cls).__new__(cls) # Build a list containing all oneOf and anyOf descendants. 
- oneof_anyof_classes = None - if cls._composed_schemas is not None: - oneof_anyof_classes = cls._composed_schemas.get( - "oneOf", () - ) + cls._composed_schemas.get("anyOf", ()) + oneof_anyof_classes: tuple = () + composed_schemas = getattr(cls, "_composed_schemas", None) + if composed_schemas is not None: + oneof_anyof_classes = composed_schemas.get("oneOf", ()) + composed_schemas.get( + "anyOf", () + ) oneof_anyof_child = new_cls in oneof_anyof_classes kwargs["_visited_composed_classes"] = visited_composed_classes + (cls,) - if cls._composed_schemas.get("allOf") and oneof_anyof_child: + if composed_schemas and composed_schemas.get("allOf") and oneof_anyof_child: # Validate that we can make self because when we make the # new_cls it will not include the allOf validations in self self_inst = super(OpenApiModel, cls).__new__(cls) - self_inst.__init__(*args, **kwargs) + self_inst.__init__(*args, **kwargs) # type: ignore[misc] new_inst = new_cls.__new__(new_cls, *args, **kwargs) new_inst.__init__(*args, **kwargs) @@ -295,12 +298,13 @@ def _new_from_openapi_data(cls, *args, **kwargs): return None if issubclass(cls, ModelComposed) and allows_single_value_input(cls): - model_kwargs = {} + model_kwargs: dict = {} oneof_instance = get_oneof_instance(cls, model_kwargs, kwargs, model_arg=arg) return oneof_instance visited_composed_classes = kwargs.get("_visited_composed_classes", ()) - if cls.discriminator is None or cls in visited_composed_classes: + discriminator = getattr(cls, "discriminator", None) + if discriminator is None or cls in visited_composed_classes: # Use case 1: this openapi schema (cls) does not have a discriminator # Use case 2: we have already visited this class before and are sure that we # want to instantiate it this time. 
We have visited this class deserializing @@ -317,13 +321,14 @@ def _new_from_openapi_data(cls, *args, **kwargs): # through Animal's discriminator because we passed in # _visited_composed_classes = (Animal,) - return cls._from_openapi_data(*args, **kwargs) + return cls._from_openapi_data(*args, **kwargs) # type: ignore[attr-defined] # Get the name and value of the discriminator property. # The discriminator name is obtained from the discriminator meta-data # and the discriminator value is obtained from the input data. - discr_propertyname_py = list(cls.discriminator.keys())[0] - discr_propertyname_js = cls.attribute_map[discr_propertyname_py] + discr_propertyname_py = list(discriminator.keys())[0] + attribute_map = getattr(cls, "attribute_map", {}) + discr_propertyname_js = attribute_map[discr_propertyname_py] if discr_propertyname_js in kwargs: discr_value = kwargs[discr_propertyname_js] elif discr_propertyname_py in kwargs: @@ -369,21 +374,22 @@ def _new_from_openapi_data(cls, *args, **kwargs): # but we know we know that we already have Dog # because it is in visited_composed_classes # so make Animal here - return cls._from_openapi_data(*args, **kwargs) + return cls._from_openapi_data(*args, **kwargs) # type: ignore[attr-defined] # Build a list containing all oneOf and anyOf descendants. 
- oneof_anyof_classes = None - if cls._composed_schemas is not None: - oneof_anyof_classes = cls._composed_schemas.get( - "oneOf", () - ) + cls._composed_schemas.get("anyOf", ()) + oneof_anyof_classes: tuple = () + composed_schemas = getattr(cls, "_composed_schemas", None) + if composed_schemas is not None: + oneof_anyof_classes = composed_schemas.get("oneOf", ()) + composed_schemas.get( + "anyOf", () + ) oneof_anyof_child = new_cls in oneof_anyof_classes kwargs["_visited_composed_classes"] = visited_composed_classes + (cls,) - if cls._composed_schemas.get("allOf") and oneof_anyof_child: + if composed_schemas and composed_schemas.get("allOf") and oneof_anyof_child: # Validate that we can make self because when we make the # new_cls it will not include the allOf validations in self - self_inst = cls._from_openapi_data(*args, **kwargs) # noqa: F841 + self_inst = cls._from_openapi_data(*args, **kwargs) # type: ignore[attr-defined] # noqa: F841 new_inst = new_cls._new_from_openapi_data(*args, **kwargs) return new_inst @@ -787,18 +793,28 @@ def check_allowed_values(allowed_values, input_variable_path, input_values): """ these_allowed_values = list(allowed_values[input_variable_path].values()) if isinstance(input_values, list) and not set(input_values).issubset(set(these_allowed_values)): - invalid_values = (", ".join(map(str, set(input_values) - set(these_allowed_values))),) + invalid_values_tuple = (", ".join(map(str, set(input_values) - set(these_allowed_values))),) raise PineconeApiValueError( "Invalid values for `%s` [%s], must be a subset of [%s]" - % (input_variable_path[0], invalid_values, ", ".join(map(str, these_allowed_values))) + % ( + input_variable_path[0], + invalid_values_tuple, + ", ".join(map(str, these_allowed_values)), + ) ) elif isinstance(input_values, dict) and not set(input_values.keys()).issubset( set(these_allowed_values) ): - invalid_values = ", ".join(map(str, set(input_values.keys()) - set(these_allowed_values))) + invalid_values_str: str = 
", ".join( + map(str, set(input_values.keys()) - set(these_allowed_values)) + ) raise PineconeApiValueError( "Invalid keys in `%s` [%s], must be a subset of [%s]" - % (input_variable_path[0], invalid_values, ", ".join(map(str, these_allowed_values))) + % ( + input_variable_path[0], + invalid_values_str, + ", ".join(map(str, these_allowed_values)), + ) ) elif not isinstance(input_values, (list, dict)) and input_values not in these_allowed_values: raise PineconeApiValueError( @@ -1059,6 +1075,16 @@ def get_discriminated_classes(cls): def get_possible_classes(cls, from_server_context): # TODO: lru_cache this + from typing import Any + + # Handle Any specially - it accepts any type + if cls is Any: + return [Any] + + # Handle cases where cls might not be a class (e.g., None, string, etc.) + if not isinstance(cls, type): + return [cls] if cls is not None else [] + possible_classes = [cls] if from_server_context: return possible_classes @@ -1091,8 +1117,10 @@ def get_required_type_classes(required_types_mixed, spec_property_naming): child_types_mixed (list/dict/tuple): describes the valid child types """ - valid_classes = [] - child_req_types_by_current_type = {} + from typing import Any, Type, get_origin + + valid_classes: list[Type[Any]] = [] + child_req_types_by_current_type: dict[Type[Any], Any] = {} for required_type in required_types_mixed: if isinstance(required_type, list): @@ -1105,7 +1133,47 @@ def get_required_type_classes(required_types_mixed, spec_property_naming): valid_classes.append(dict) child_req_types_by_current_type[dict] = required_type[str] else: - valid_classes.extend(get_possible_classes(required_type, spec_property_naming)) + # Handle typing generics like Dict[str, Any], List[str], etc. 
+ # by converting them to their built-in equivalents + # Check if it's a typing generic by looking for __origin__ or __args__ + if hasattr(required_type, "__origin__") or ( + hasattr(required_type, "__args__") and required_type.__args__ + ): + try: + origin = get_origin(required_type) + if origin is dict: + valid_classes.append(dict) + # Extract value type from Dict[K, V] - value type is args[1] + from typing import get_args + + args = get_args(required_type) + if len(args) >= 2: + # Store the value type for child type checking + child_req_types_by_current_type[dict] = (args[1],) + else: + child_req_types_by_current_type[dict] = required_type + elif origin is list: + valid_classes.append(list) + # Extract element type from List[T] - element type is args[0] + from typing import get_args + + args = get_args(required_type) + if len(args) >= 1: + child_req_types_by_current_type[list] = (args[0],) + else: + child_req_types_by_current_type[list] = required_type + elif origin is tuple: + valid_classes.append(tuple) + child_req_types_by_current_type[tuple] = required_type + else: + valid_classes.extend( + get_possible_classes(required_type, spec_property_naming) + ) + except (TypeError, AttributeError): + # Not a typing generic, treat as regular class + valid_classes.extend(get_possible_classes(required_type, spec_property_naming)) + else: + valid_classes.extend(get_possible_classes(required_type, spec_property_naming)) return tuple(valid_classes), child_req_types_by_current_type @@ -1316,7 +1384,10 @@ def deserialize_file(response_data, configuration, content_disposition=None): os.remove(path) if content_disposition: - filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition).group(1) + match = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition) + if match is None: + raise ValueError("Could not extract filename from content_disposition") + filename = match.group(1) path = os.path.join(os.path.dirname(path), filename) with open(path, 
"wb") as f: @@ -1325,8 +1396,8 @@ def deserialize_file(response_data, configuration, content_disposition=None): response_data = response_data.encode("utf-8") f.write(response_data) - f = open(path, "rb") - return f + file_handle: io.BufferedReader = open(path, "rb") + return file_handle def attempt_convert_item( @@ -1435,6 +1506,12 @@ def is_valid_type(input_class_simple, valid_classes): Returns: bool """ + from typing import Any + + # If Any is in valid_classes, accept any type + if Any in valid_classes: + return True + valid_type = input_class_simple in valid_classes if not valid_type and ( issubclass(input_class_simple, OpenApiModel) or input_class_simple is none_type @@ -1584,7 +1661,9 @@ def model_to_dict(model_instance, serialize=True): serialize (bool): if True, the keys in the dict will be values from attribute_map """ - result = {} + from typing import Any + + result: dict[str, Any] = {} model_instances = [model_instance] if hasattr(model_instance, "_composed_schemas") and model_instance._composed_schemas: @@ -1800,7 +1879,9 @@ def get_anyof_instances(self, model_args, constant_args): Returns anyof_instances (list) """ - anyof_instances = [] + from typing import Any + + anyof_instances: list[Any] = [] if len(self._composed_schemas["anyOf"]) == 0: return anyof_instances diff --git a/pinecone/openapi_support/rest_urllib3.py b/pinecone/openapi_support/rest_urllib3.py index e25d80a00..f68341e55 100644 --- a/pinecone/openapi_support/rest_urllib3.py +++ b/pinecone/openapi_support/rest_urllib3.py @@ -178,6 +178,7 @@ def request( content_type = headers.get("Content-Type", "").lower() if content_type == "" or ("json" in content_type): + request_body: str | bytes | None = None if body is None: request_body = None else: diff --git a/pinecone/openapi_support/retry_aiohttp.py b/pinecone/openapi_support/retry_aiohttp.py index 2b3019e7e..9905ef8e0 100644 --- a/pinecone/openapi_support/retry_aiohttp.py +++ b/pinecone/openapi_support/retry_aiohttp.py @@ -41,4 +41,4 @@ def 
get_timeout( """Return timeout with exponential backoff.""" jitter = random.uniform(0, 0.1) timeout = self._start_timeout * (2 ** (attempt - 1)) - return min(timeout + jitter, self._max_timeout) + return float(min(timeout + jitter, self._max_timeout)) diff --git a/pinecone/openapi_support/serializer.py b/pinecone/openapi_support/serializer.py index fa59396ae..52bf5ecda 100644 --- a/pinecone/openapi_support/serializer.py +++ b/pinecone/openapi_support/serializer.py @@ -12,7 +12,13 @@ class Serializer: def get_file_data_and_close_file(file_instance: io.IOBase) -> bytes: file_data = file_instance.read() file_instance.close() - return file_data + if isinstance(file_data, bytes): + return file_data + # If read() returns str, encode it + if isinstance(file_data, str): + return file_data.encode("utf-8") + # Fallback: convert to bytes + return bytes(file_data) if file_data is not None else b"" @classmethod def sanitize_for_serialization(cls, obj) -> Any: diff --git a/pinecone/pinecone.py b/pinecone/pinecone.py index 00fd4cfee..3fd018903 100644 --- a/pinecone/pinecone.py +++ b/pinecone/pinecone.py @@ -1,5 +1,5 @@ import logging -from typing import Optional, Dict, Union, TYPE_CHECKING, Any +from typing import Optional, Dict, Union, TYPE_CHECKING, Any, NoReturn from multiprocessing import cpu_count import warnings @@ -18,6 +18,8 @@ from pinecone.db_data import _Index as Index, _IndexAsyncio as IndexAsyncio from pinecone.db_control.index_host_store import IndexHostStore from pinecone.core.openapi.db_control.api.manage_indexes_api import ManageIndexesApi + from pinecone.inference import Inference + from pinecone.db_control import DBControl from pinecone.db_control.types import CreateIndexForModelEmbedTypedDict, ConfigureIndexEmbed from pinecone.db_control.models.serverless_spec import ( ReadCapacityDict, @@ -72,7 +74,7 @@ def __init__( additional_headers: Optional[Dict[str, str]] = {}, pool_threads: Optional[int] = None, **kwargs, - ): + ) -> None: """ The ``Pinecone`` class is 
the main entry point for interacting with Pinecone via this Python SDK. Instances of the ``Pinecone`` class are used to manage and interact with Pinecone resources such as @@ -247,16 +249,16 @@ def __init__( self._pool_threads = pool_threads """ :meta private: """ - self._inference = None # Lazy initialization + self._inference: Optional["Inference"] = None # Lazy initialization """ :meta private: """ - self._db_control = None # Lazy initialization + self._db_control: Optional["DBControl"] = None # Lazy initialization """ :meta private: """ super().__init__() # Initialize PluginAware @property - def inference(self): + def inference(self) -> "Inference": """ Inference is a namespace where an instance of the `pinecone.inference.Inference` class is lazily created and cached. """ @@ -271,7 +273,7 @@ def inference(self): return self._inference @property - def db(self): + def db(self) -> "DBControl": """ DBControl is a namespace where an instance of the `pinecone.db_control.DBControl` class is lazily created and cached. 
""" @@ -413,7 +415,7 @@ def create_index_from_backup( timeout=timeout, ) - def delete_index(self, name: str, timeout: Optional[int] = None): + def delete_index(self, name: str, timeout: Optional[int] = None) -> None: return self.db.index.delete(name=name, timeout=timeout) def list_indexes(self) -> "IndexList": @@ -441,7 +443,7 @@ def configure_index( "ReadCapacityDedicatedSpec", ] ] = None, - ): + ) -> None: return self.db.index.configure( name=name, replicas=replicas, @@ -461,8 +463,11 @@ def list_collections(self) -> "CollectionList": def delete_collection(self, name: str) -> None: return self.db.collection.delete(name=name) - def describe_collection(self, name: str): - return self.db.collection.describe(name=name) + def describe_collection(self, name: str) -> Dict[str, Any]: + from typing import cast + + result = self.db.collection.describe(name=name) + return cast(Dict[str, Any], result) @require_kwargs def create_backup( @@ -503,12 +508,12 @@ def describe_restore_job(self, *, job_id: str) -> "RestoreJobModel": return self.db.restore_job.describe(job_id=job_id) @staticmethod - def from_texts(*args, **kwargs): + def from_texts(*args: Any, **kwargs: Any) -> NoReturn: """:meta private:""" raise AttributeError(_build_langchain_attribute_error_message("from_texts")) @staticmethod - def from_documents(*args, **kwargs): + def from_documents(*args: Any, **kwargs: Any) -> NoReturn: """:meta private:""" raise AttributeError(_build_langchain_attribute_error_message("from_documents")) diff --git a/pinecone/pinecone_asyncio.py b/pinecone/pinecone_asyncio.py index 8d1ba548f..ab7345a40 100644 --- a/pinecone/pinecone_asyncio.py +++ b/pinecone/pinecone_asyncio.py @@ -1,6 +1,7 @@ import logging import warnings from typing import Optional, Dict, Union, TYPE_CHECKING, Any +from typing_extensions import Self from pinecone.config import PineconeConfig, ConfigBuilder @@ -12,6 +13,8 @@ if TYPE_CHECKING: from pinecone.db_control.types import ConfigureIndexEmbed, 
CreateIndexForModelEmbedTypedDict from pinecone.db_data import _IndexAsyncio + from pinecone.inference import AsyncioInference + from pinecone.db_control.db_control_asyncio import DBControlAsyncio from pinecone.db_control.enums import ( Metric, VectorType, @@ -87,7 +90,7 @@ def __init__( ssl_verify: Optional[bool] = None, additional_headers: Optional[Dict[str, str]] = {}, **kwargs, - ): + ) -> None: """ Initialize the ``PineconeAsyncio`` client. @@ -136,19 +139,22 @@ def __init__( self._openapi_config = ConfigBuilder.build_openapi_config(self._config, **kwargs) """ :meta private: """ - self._inference = None # Lazy initialization + self._inference: Optional["AsyncioInference"] = None # Lazy initialization """ :meta private: """ - self._db_control = None # Lazy initialization + self._db_control: Optional["DBControlAsyncio"] = None # Lazy initialization """ :meta private: """ - async def __aenter__(self): + async def __aenter__(self) -> Self: return self - async def __aexit__(self, exc_type, exc_value, traceback): + async def __aexit__( + self, exc_type: Optional[type], exc_value: Optional[BaseException], traceback: Optional[Any] + ) -> Optional[bool]: await self.close() + return None - async def close(self): + async def close(self) -> None: """Cleanup resources used by the Pinecone client. This method should be called when the client is no longer needed so that @@ -189,7 +195,7 @@ async def main(): await self.db._index_api.api_client.close() @property - def inference(self): + def inference(self) -> "AsyncioInference": """Dynamically create and cache the AsyncioInference instance.""" if self._inference is None: from pinecone.inference import AsyncioInference @@ -198,7 +204,7 @@ def inference(self): return self._inference @property - def db(self): + def db(self) -> "DBControlAsyncio": """ db is a namespace where an instance of the ``pinecone.db_control.DBControlAsyncio`` class is lazily created and cached. 
""" @@ -218,7 +224,10 @@ def index_host_store(self) -> "IndexHostStore": DeprecationWarning, stacklevel=2, ) - return self.db.index._index_host_store + # IndexResourceAsyncio doesn't have _index_host_store, access the singleton directly + from pinecone.db_control.index_host_store import IndexHostStore + + return IndexHostStore() @property def index_api(self) -> "AsyncioManageIndexesApi": @@ -312,7 +321,7 @@ async def create_index_from_backup( timeout=timeout, ) - async def delete_index(self, name: str, timeout: Optional[int] = None): + async def delete_index(self, name: str, timeout: Optional[int] = None) -> None: return await self.db.index.delete(name=name, timeout=timeout) async def list_indexes(self) -> "IndexList": @@ -340,7 +349,7 @@ async def configure_index( "ReadCapacityDedicatedSpec", ] ] = None, - ): + ) -> None: return await self.db.index.configure( name=name, replicas=replicas, @@ -351,16 +360,16 @@ async def configure_index( read_capacity=read_capacity, ) - async def create_collection(self, name: str, source: str): + async def create_collection(self, name: str, source: str) -> None: return await self.db.collection.create(name=name, source=source) async def list_collections(self) -> "CollectionList": return await self.db.collection.list() - async def delete_collection(self, name: str): + async def delete_collection(self, name: str) -> None: return await self.db.collection.delete(name=name) - async def describe_collection(self, name: str): + async def describe_collection(self, name: str) -> Dict[str, Any]: return await self.db.collection.describe(name=name) @require_kwargs diff --git a/pinecone/pinecone_interface_asyncio.py b/pinecone/pinecone_interface_asyncio.py index 3c344ffbb..cbbe52ad1 100644 --- a/pinecone/pinecone_interface_asyncio.py +++ b/pinecone/pinecone_interface_asyncio.py @@ -548,7 +548,7 @@ async def main(): pass @abstractmethod - def create_index_from_backup( + async def create_index_from_backup( self, *, name: str, diff --git 
a/pinecone/utils/check_kwargs.py b/pinecone/utils/check_kwargs.py index 17038b1ec..89d918df7 100644 --- a/pinecone/utils/check_kwargs.py +++ b/pinecone/utils/check_kwargs.py @@ -1,8 +1,11 @@ +from __future__ import annotations + import inspect import logging +from typing import Callable, Any -def check_kwargs(caller, given): +def check_kwargs(caller: Callable[..., Any], given: set[str]) -> None: argspec = inspect.getfullargspec(caller) diff = set(given).difference(argspec.args) if diff: diff --git a/pinecone/utils/error_handling.py b/pinecone/utils/error_handling.py index c18090eb2..bacc03b1f 100644 --- a/pinecone/utils/error_handling.py +++ b/pinecone/utils/error_handling.py @@ -1,5 +1,10 @@ import inspect from functools import wraps +from typing import TypeVar, Callable +from typing_extensions import ParamSpec + +P = ParamSpec("P") +R = TypeVar("R") class ProtocolError(Exception): @@ -8,9 +13,16 @@ class ProtocolError(Exception): pass -def validate_and_convert_errors(func): +def validate_and_convert_errors(func: Callable[P, R]) -> Callable[P, R]: + """ + Decorator that validates and converts urllib3 protocol errors to ProtocolError. 
+ + :param func: The function to wrap + :return: The wrapped function with the same signature + """ + @wraps(func) - def inner_func(*args, **kwargs): + def inner_func(*args: P.args, **kwargs: P.kwargs) -> R: try: return func(*args, **kwargs) except Exception as e: @@ -31,5 +43,5 @@ def inner_func(*args, **kwargs): # Override signature sig = inspect.signature(func) - inner_func.__signature__ = sig + inner_func.__signature__ = sig # type: ignore[attr-defined] return inner_func diff --git a/pinecone/utils/lazy_imports.py b/pinecone/utils/lazy_imports.py index 6bb3d15b0..c48d33041 100644 --- a/pinecone/utils/lazy_imports.py +++ b/pinecone/utils/lazy_imports.py @@ -24,18 +24,18 @@ def __init__(self, original_module, lazy_imports): self._lazy_imports = lazy_imports self._loaded_attrs = {} - @property - def __doc__(self): - return self._original_module.__doc__ - - @property - def __dict__(self): - # Get the base dictionary from the original module - base_dict = self._original_module.__dict__.copy() - # Add lazy-loaded items - for name, value in self._loaded_attrs.items(): - base_dict[name] = value - return base_dict + def __getattribute__(self, name): + if name == "__doc__": + return object.__getattribute__(self, "_original_module").__doc__ + if name == "__dict__": + # Get the base dictionary from the original module + base_dict = object.__getattribute__(self, "_original_module").__dict__.copy() + # Add lazy-loaded items + loaded_attrs = object.__getattribute__(self, "_loaded_attrs") + for name, value in loaded_attrs.items(): + base_dict[name] = value + return base_dict + return object.__getattribute__(self, name) def __dir__(self): # Get the base directory listing from the original module diff --git a/pinecone/utils/require_kwargs.py b/pinecone/utils/require_kwargs.py index 9321f4689..1c2649aa8 100644 --- a/pinecone/utils/require_kwargs.py +++ b/pinecone/utils/require_kwargs.py @@ -1,10 +1,22 @@ import functools import inspect +from typing import TypeVar, Callable +from 
typing_extensions import ParamSpec +P = ParamSpec("P") +R = TypeVar("R") + + +def require_kwargs(func: Callable[P, R]) -> Callable[P, R]: + """ + Decorator that requires all arguments (except self) to be passed as keyword arguments. + + :param func: The function to wrap + :return: The wrapped function with the same signature + """ -def require_kwargs(func): @functools.wraps(func) - def wrapper(*args, **kwargs): + def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: if len(args) > 1: # First arg is self param_names = list(inspect.signature(func).parameters.keys())[1:] # Skip self raise TypeError( diff --git a/tests/integration/grpc/db/data/test_query_future.py b/tests/integration/grpc/db/data/test_query_future.py index 09cd59104..f657aaf05 100644 --- a/tests/integration/grpc/db/data/test_query_future.py +++ b/tests/integration/grpc/db/data/test_query_future.py @@ -65,8 +65,7 @@ def poll_until_query_has_results( time_waited += wait_per_iteration raise TimeoutError( - f"Timeout waiting for query to return {expected_count} results " - f"after {time_waited} seconds" + f"Timeout waiting for query to return {expected_count} results after {time_waited} seconds" ) diff --git a/tests/integration/grpc/db/data/test_timeouts.py b/tests/integration/grpc/db/data/test_timeouts.py index a2cdbc9b3..ca51b2354 100644 --- a/tests/integration/grpc/db/data/test_timeouts.py +++ b/tests/integration/grpc/db/data/test_timeouts.py @@ -412,5 +412,5 @@ def test_fetch_with_default_timeout(self, local_idx: GRPCIndex): assert result.vectors["1"].id == "1" assert result.vectors["2"].id == "2" assert result.vectors["3"].id == "3" - assert result.usage.read_units == 1 + assert result.usage["read_units"] == 1 assert result.namespace == "testnamespace" diff --git a/tests/integration/helpers/helpers.py b/tests/integration/helpers/helpers.py index f34ce36c0..b6b80cda6 100644 --- a/tests/integration/helpers/helpers.py +++ b/tests/integration/helpers/helpers.py @@ -178,8 +178,7 @@ def 
poll_until_lsn_reconciled( while not done: logger.debug( - f"Polling for LSN reconciliation. Target LSN: {target_lsn}, " - f"total time: {total_time}s" + f"Polling for LSN reconciliation. Target LSN: {target_lsn}, total time: {total_time}s" ) # Try query as a lightweight operation to check LSN