From 320afc178b920c440547fa04ae7e4589b03ffd23 Mon Sep 17 00:00:00 2001 From: Gareth Date: Thu, 20 Jun 2024 11:53:42 -0400 Subject: [PATCH] Add Black Formatting and Linting (#355) ## Problem PRs often include formatting changes that makes it difficult to identify changes in functionality ## Solution - Format all code with `black` - Introduce linting `black` linting workflow ## Type of Change - [ ] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) - [ ] This change requires a documentation update - [ ] Infrastructure change (CI configs, etc) - [ ] Non-code change (docs, etc) - [x] None of the above: (explain here) --- .github/workflows/lint.yaml | 9 + .pre-commit-config.yaml | 17 +- pinecone/__init__.py | 3 +- pinecone/config/__init__.py | 4 +- pinecone/config/config.py | 22 +- pinecone/config/openapi.py | 10 +- pinecone/config/pinecone_config.py | 19 +- pinecone/control/__init__.py | 2 +- pinecone/control/index_host_store.py | 5 +- pinecone/control/langchain_import_warnings.py | 6 +- pinecone/control/pinecone.py | 159 +-- pinecone/core/client/api/data_plane_api.py | 916 ++++++------------ .../core/client/api/manage_indexes_api.py | 912 ++++++----------- pinecone/core/client/api_client.py | 533 +++++----- pinecone/core/client/apis/__init__.py | 1 - pinecone/core/client/configuration.py | 248 ++--- pinecone/core/client/exceptions.py | 17 +- pinecone/core/client/model/collection_list.py | 100 +- .../core/client/model/collection_model.py | 126 +-- .../client/model/configure_index_request.py | 100 +- .../model/configure_index_request_spec.py | 100 +- .../model/configure_index_request_spec_pod.py | 103 +- .../client/model/create_collection_request.py | 109 ++- .../core/client/model/create_index_request.py | 132 +-- pinecone/core/client/model/delete_request.py | 110 ++- 
.../model/describe_index_stats_request.py | 98 +- .../model/describe_index_stats_response.py | 112 ++- pinecone/core/client/model/error_response.py | 104 +- .../core/client/model/error_response_error.py | 141 +-- pinecone/core/client/model/fetch_response.py | 110 ++- pinecone/core/client/model/index_list.py | 100 +- pinecone/core/client/model/index_model.py | 144 +-- .../core/client/model/index_model_spec.py | 106 +- .../core/client/model/index_model_status.py | 117 ++- pinecone/core/client/model/list_item.py | 98 +- pinecone/core/client/model/list_response.py | 116 ++- .../core/client/model/namespace_summary.py | 98 +- pinecone/core/client/model/pagination.py | 98 +- pinecone/core/client/model/pod_spec.py | 149 +-- .../client/model/pod_spec_metadata_config.py | 98 +- pinecone/core/client/model/protobuf_any.py | 102 +- .../core/client/model/protobuf_null_value.py | 82 +- pinecone/core/client/model/query_request.py | 147 +-- pinecone/core/client/model/query_response.py | 116 ++- pinecone/core/client/model/query_vector.py | 122 ++- pinecone/core/client/model/rpc_status.py | 108 ++- pinecone/core/client/model/scored_vector.py | 119 ++- pinecone/core/client/model/serverless_spec.py | 107 +- .../core/client/model/single_query_results.py | 104 +- pinecone/core/client/model/sparse_values.py | 105 +- pinecone/core/client/model/update_request.py | 122 ++- pinecone/core/client/model/upsert_request.py | 104 +- pinecone/core/client/model/upsert_response.py | 98 +- pinecone/core/client/model/usage.py | 98 +- pinecone/core/client/model/vector.py | 118 ++- pinecone/core/client/model_utils.py | 638 +++++------- pinecone/core/client/rest.py | 326 ++++--- .../core/grpc/protos/vector_service_pb2.py | 439 +++++---- .../core/grpc/protos/vector_service_pb2.pyi | 252 ++++- .../grpc/protos/vector_service_pb2_grpc.py | 412 ++++---- pinecone/data/__init__.py | 14 +- pinecone/data/errors.py | 7 + pinecone/data/index.py | 66 +- pinecone/data/sparse_vector_factory.py | 19 +- 
pinecone/data/vector_factory.py | 16 +- pinecone/deprecation_warnings.py | 26 +- pinecone/exceptions.py | 6 +- pinecone/grpc/__init__.py | 4 +- pinecone/grpc/base.py | 6 +- pinecone/grpc/index_grpc.py | 53 +- pinecone/grpc/pinecone.py | 33 +- pinecone/grpc/sparse_values_factory.py | 19 +- pinecone/grpc/utils.py | 18 +- pinecone/grpc/vector_factory_grpc.py | 36 +- pinecone/models/__init__.py | 20 +- pinecone/models/collection_description.py | 3 +- pinecone/models/collection_list.py | 15 +- pinecone/models/index_description.py | 15 +- pinecone/models/index_list.py | 13 +- pinecone/models/list_response.py | 2 + pinecone/models/pod_spec.py | 7 +- pinecone/models/serverless_spec.py | 3 +- pinecone/utils/__init__.py | 2 +- pinecone/utils/check_kwargs.py | 1 + pinecone/utils/convert_to_list.py | 7 +- pinecone/utils/deprecation_notice.py | 3 +- pinecone/utils/docslinks.py | 6 +- pinecone/utils/error_handling.py | 1 + pinecone/utils/fix_tuple_length.py | 2 +- pinecone/utils/normalize_host.py | 6 +- pinecone/utils/setup_openapi_client.py | 16 +- pinecone/utils/user_agent.py | 8 +- pinecone/utils/version.py | 4 +- poetry.lock | 192 +++- pyproject.toml | 2 + scripts/cleanup-all.py | 15 +- scripts/create-index-legacy.py | 58 +- scripts/create.py | 34 +- scripts/delete-all-collections.py | 10 +- scripts/delete.py | 15 +- scripts/generate_usage.py | 69 +- tests/dependency/grpc/test_sanity.py | 27 +- tests/dependency/rest/test_sanity.py | 29 +- tests/integration/control/pod/conftest.py | 86 +- .../control/pod/test_collections.py | 80 +- .../control/pod/test_collections_errors.py | 85 +- .../control/pod/test_configure_pod_index.py | 7 +- .../control/serverless/conftest.py | 57 +- .../test_create_index_api_errors.py | 17 +- .../test_create_index_sl_happy_path.py | 17 +- .../serverless/test_create_index_timeouts.py | 18 +- .../test_create_index_type_errors.py | 7 +- .../control/serverless/test_describe_index.py | 23 +- .../control/serverless/test_list_indexes.py | 9 +- 
tests/integration/data/conftest.py | 70 +- tests/integration/data/seed.py | 51 +- tests/integration/data/test_fetch.py | 78 +- tests/integration/data/test_initialization.py | 23 +- tests/integration/data/test_list.py | 52 +- tests/integration/data/test_list_errors.py | 13 +- .../data/test_openapi_configuration.py | 9 +- tests/integration/data/test_query.py | 116 +-- tests/integration/data/test_query_errors.py | 22 +- tests/integration/data/test_upsert.py | 91 +- tests/integration/data/test_upsert_errors.py | 188 ++-- tests/integration/data/utils.py | 2 +- tests/integration/helpers/__init__.py | 6 +- tests/integration/helpers/helpers.py | 50 +- tests/integration/proxy_config/conftest.py | 70 +- .../proxy_config/test_proxy_settings.py | 46 +- tests/integration/test_upsert.py | 44 +- tests/unit/data/test_datetime_parsing.py | 25 +- tests/unit/data/test_vector_factory.py | 190 ++-- tests/unit/models/test_collection_list.py | 34 +- tests/unit/models/test_index_list.py | 55 +- tests/unit/test_config.py | 83 +- tests/unit/test_config_builder.py | 32 +- tests/unit/test_control.py | 150 +-- tests/unit/test_index.py | 12 +- tests/unit/test_index_initialization.py | 59 +- tests/unit/test_langchain_helpful_errors.py | 4 +- tests/unit/test_version.py | 3 +- tests/unit/utils/test_convert_to_list.py | 23 +- tests/unit/utils/test_docs_links.py | 5 +- tests/unit/utils/test_normalize_host.py | 18 +- tests/unit/utils/test_setup_openapi_client.py | 71 +- tests/unit/utils/test_user_agent.py | 33 +- tests/unit_grpc/conftest.py | 1 + .../test_grpc_index_initialization.py | 65 +- tests/unit_grpc/test_grpc_index_query.py | 1 + tests/unit_grpc/test_sparse_values_factory.py | 84 +- tests/unit_grpc/test_vector_factory_grpc.py | 124 +-- 152 files changed, 6608 insertions(+), 5880 deletions(-) create mode 100644 .github/workflows/lint.yaml diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml new file mode 100644 index 00000000..7325737e --- /dev/null +++ 
b/.github/workflows/lint.yaml @@ -0,0 +1,9 @@ +name: "Lint" +on: [push, pull_request] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: psf/black@stable \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 41338957..365d8a2c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,13 @@ repos: -- repo: https://github.com/pre-commit/pre-commit-hooks + - repo: https://github.com/pre-commit/pre-commit-hooks rev: v3.2.0 hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - id: check-added-large-files -- repo: https://github.com/psf/black - rev: 23.9.1 + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + - repo: https://github.com/psf/black-pre-commit-mirror + rev: 24.4.2 hooks: - - id: black + - id: black + language_version: python3.12 diff --git a/pinecone/__init__.py b/pinecone/__init__.py index b94720dd..499e5307 100644 --- a/pinecone/__init__.py +++ b/pinecone/__init__.py @@ -1,6 +1,7 @@ """ .. 
include:: ../README.md """ + import warnings from tqdm import TqdmExperimentalWarning @@ -17,4 +18,4 @@ IndexModel, ) -from .utils import __version__ \ No newline at end of file +from .utils import __version__ diff --git a/pinecone/config/__init__.py b/pinecone/config/__init__.py index 81d51fa0..a13a602a 100644 --- a/pinecone/config/__init__.py +++ b/pinecone/config/__init__.py @@ -4,5 +4,5 @@ from .config import ConfigBuilder, Config from .pinecone_config import PineconeConfig -if (os.getenv('PINECONE_DEBUG') != None): - logging.basicConfig(level=logging.DEBUG) \ No newline at end of file +if os.getenv("PINECONE_DEBUG") != None: + logging.basicConfig(level=logging.DEBUG) diff --git a/pinecone/config/config.py b/pinecone/config/config.py index 5b5f111c..df1c4e4f 100644 --- a/pinecone/config/config.py +++ b/pinecone/config/config.py @@ -7,6 +7,7 @@ from pinecone.utils import normalize_host from pinecone.utils.constants import SOURCE_TAG + class Config(NamedTuple): api_key: str = "" host: str = "" @@ -17,6 +18,7 @@ class Config(NamedTuple): additional_headers: Optional[Dict[str, str]] = {} source_tag: Optional[str] = None + class ConfigBuilder: """ @@ -56,27 +58,29 @@ def build( raise PineconeConfigurationError("You haven't specified a host.") return Config(api_key, host, proxy_url, proxy_headers, ssl_ca_certs, ssl_verify, additional_headers, source_tag) - + @staticmethod def build_openapi_config( config: Config, openapi_config: Optional[OpenApiConfiguration] = None, **kwargs ) -> OpenApiConfiguration: if openapi_config: - openapi_config = OpenApiConfigFactory.copy(openapi_config=openapi_config, api_key=config.api_key, host=config.host) + openapi_config = OpenApiConfigFactory.copy( + openapi_config=openapi_config, api_key=config.api_key, host=config.host + ) elif openapi_config is None: openapi_config = OpenApiConfigFactory.build(api_key=config.api_key, host=config.host) # Check if value passed before overriding any values present - # in the openapi_config. 
This means if the user has passed + # in the openapi_config. This means if the user has passed # an openapi_config object and a kwarg for the same setting, # the kwarg will take precedence. - if (config.proxy_url): + if config.proxy_url: openapi_config.proxy = config.proxy_url - if (config.proxy_headers): + if config.proxy_headers: openapi_config.proxy_headers = config.proxy_headers - if (config.ssl_ca_certs): + if config.ssl_ca_certs: openapi_config.ssl_ca_cert = config.ssl_ca_certs - if (config.ssl_verify != None): + if config.ssl_verify != None: openapi_config.verify_ssl = config.ssl_verify - - return openapi_config \ No newline at end of file + + return openapi_config diff --git a/pinecone/config/openapi.py b/pinecone/config/openapi.py index c8ba117a..1ff4b7f5 100644 --- a/pinecone/config/openapi.py +++ b/pinecone/config/openapi.py @@ -23,15 +23,15 @@ def build(cls, api_key: str, host: Optional[str] = None, **kwargs): openapi_config.ssl_ca_cert = certifi.where() openapi_config.socket_options = cls._get_socket_options() return openapi_config - + @classmethod def copy(cls, openapi_config: OpenApiConfiguration, api_key: str, host: str) -> OpenApiConfiguration: - ''' - Copy a user-supplied openapi configuration and update it with the user's api key and host. + """ + Copy a user-supplied openapi configuration and update it with the user's api key and host. If they have not specified other socket configuration, we will use the default values. - We expect these objects are being passed mainly a vehicle for proxy configuration, so + We expect these objects are being passed mainly a vehicle for proxy configuration, so we don't modify those settings. 
- ''' + """ copied = copy.deepcopy(openapi_config) copied.api_key = {"ApiKeyAuth": api_key} diff --git a/pinecone/config/pinecone_config.py b/pinecone/config/pinecone_config.py index 3e2e7d82..2960bb9e 100644 --- a/pinecone/config/pinecone_config.py +++ b/pinecone/config/pinecone_config.py @@ -9,16 +9,21 @@ DEFAULT_CONTROLLER_HOST = "https://api.pinecone.io" -class PineconeConfig(): +class PineconeConfig: @staticmethod - def build(api_key: Optional[str] = None, host: Optional[str] = None, additional_headers: Optional[Dict[str, str]] = {}, **kwargs) -> Config: + def build( + api_key: Optional[str] = None, + host: Optional[str] = None, + additional_headers: Optional[Dict[str, str]] = {}, + **kwargs, + ) -> Config: host = host or kwargs.get("host") or os.getenv("PINECONE_CONTROLLER_HOST") or DEFAULT_CONTROLLER_HOST headers_json = os.getenv("PINECONE_ADDITIONAL_HEADERS") if headers_json: - try: - headers = json.loads(headers_json) - additional_headers = additional_headers or headers - except Exception as e: - logger.warn(f'Ignoring PINECONE_ADDITIONAL_HEADERS: {e}') + try: + headers = json.loads(headers_json) + additional_headers = additional_headers or headers + except Exception as e: + logger.warn(f"Ignoring PINECONE_ADDITIONAL_HEADERS: {e}") return ConfigBuilder.build(api_key=api_key, host=host, additional_headers=additional_headers, **kwargs) diff --git a/pinecone/control/__init__.py b/pinecone/control/__init__.py index f083730e..4c739f1a 100644 --- a/pinecone/control/__init__.py +++ b/pinecone/control/__init__.py @@ -1 +1 @@ -from .pinecone import Pinecone \ No newline at end of file +from .pinecone import Pinecone diff --git a/pinecone/control/index_host_store.py b/pinecone/control/index_host_store.py index cf2e1df4..89763066 100644 --- a/pinecone/control/index_host_store.py +++ b/pinecone/control/index_host_store.py @@ -4,6 +4,7 @@ from pinecone.core.client.exceptions import PineconeException from pinecone.utils import normalize_host + class SingletonMeta(type): 
_instances: Dict[str, str] = {} @@ -42,5 +43,7 @@ def get_host(self, api: IndexOperationsApi, config: Config, index_name: str) -> description = api.describe_index(index_name) self.set_host(config, index_name, description.host) if not self.key_exists(key): - raise PineconeException(f"Could not get host for index: {index_name}. Call describe_index('{index_name}') to check the current status.") + raise PineconeException( + f"Could not get host for index: {index_name}. Call describe_index('{index_name}') to check the current status." + ) return self._indexHosts[key] diff --git a/pinecone/control/langchain_import_warnings.py b/pinecone/control/langchain_import_warnings.py index fb4a00ca..ac55ef57 100644 --- a/pinecone/control/langchain_import_warnings.py +++ b/pinecone/control/langchain_import_warnings.py @@ -1,9 +1,9 @@ from pinecone.utils import docslinks -KB_ARTICLE = docslinks['LANGCHAIN_IMPORT_KB_ARTICLE'] -GITHUB_REPO = docslinks['GITHUB_REPO'] +KB_ARTICLE = docslinks["LANGCHAIN_IMPORT_KB_ARTICLE"] +GITHUB_REPO = docslinks["GITHUB_REPO"] + def _build_langchain_attribute_error_message(method_name: str): return f"""{method_name} is not a top-level attribute of the Pinecone class provided by pinecone's official python package developed at {GITHUB_REPO}. You may have a name collision with an export from another dependency in your project that wraps Pinecone functionality and exports a similarly named class. 
Please refer to the following knowledge base article for more information: {KB_ARTICLE} """ - diff --git a/pinecone/control/pinecone.py b/pinecone/control/pinecone.py index 83b8384e..f4635376 100644 --- a/pinecone/control/pinecone.py +++ b/pinecone/control/pinecone.py @@ -16,7 +16,7 @@ CreateIndexRequest, ConfigureIndexRequest, ConfigureIndexRequestSpec, - ConfigureIndexRequestSpecPod + ConfigureIndexRequestSpecPod, ) from pinecone.models import ServerlessSpec, PodSpec, IndexList, CollectionList from .langchain_import_warnings import _build_langchain_attribute_error_message @@ -27,8 +27,8 @@ logger = logging.getLogger(__name__) -class Pinecone: +class Pinecone: def __init__( self, api_key: Optional[str] = None, @@ -44,7 +44,7 @@ def __init__( **kwargs, ): """ - The `Pinecone` class is the main entry point for interacting with Pinecone via this Python SDK. + The `Pinecone` class is the main entry point for interacting with Pinecone via this Python SDK. It is used to create, delete, and manage your indexes and collections. :param api_key: The API key to use for authentication. If not passed via kwarg, the API key will be read from the environment variable `PINECONE_API_KEY`. @@ -56,7 +56,7 @@ def __init__( :param proxy_headers: Additional headers to pass to the proxy. Use this if your proxy setup requires authentication. Default: `{}` :type proxy_headers: Dict[str, str], optional :param ssl_ca_certs: The path to the SSL CA certificate bundle to use for the connection. This path should point to a file in PEM format. Default: `None` - :type ssl_ca_certs: str, optional + :type ssl_ca_certs: str, optional :param ssl_verify: SSL verification is performed by default, but can be disabled using the boolean flag. Default: `True` :type ssl_verify: bool, optional :param config: A `pinecone.config.Config` object. If passed, the `api_key` and `host` parameters will be ignored. 
@@ -68,7 +68,7 @@ def __init__( :param index_api: An instance of `pinecone.core.client.api.manage_indexes_api.ManageIndexesApi`. If passed, the `host` parameter will be ignored. :type index_api: pinecone.core.client.api.manage_indexes_api.ManageIndexesApi, optional - + ### Configuration with environment variables If you instantiate the Pinecone client with no arguments, it will attempt to read the API key from the environment variable `PINECONE_API_KEY`. @@ -94,26 +94,26 @@ def __init__( The Pinecone client supports the following environment variables: - - `PINECONE_API_KEY`: The API key to use for authentication. If not passed via + - `PINECONE_API_KEY`: The API key to use for authentication. If not passed via kwarg, the API key will be read from the environment variable `PINECONE_API_KEY`. - - `PINECONE_DEBUG_CURL`: When troubleshooting it can be very useful to run curl - commands against the control plane API to see exactly what data is being sent + - `PINECONE_DEBUG_CURL`: When troubleshooting it can be very useful to run curl + commands against the control plane API to see exactly what data is being sent and received without all the abstractions and transformations applied by the Python SDK. If you set this environment variable to `true`, the Pinecone client will use request parameters to print out an equivalent curl command that you can run yourself - or share with Pinecone support. **Be very careful with this option, as it will print out + or share with Pinecone support. **Be very careful with this option, as it will print out your API key** which forms part of a required authentication header. Default: `false` - + ### Proxy configuration If your network setup requires you to interact with Pinecone via a proxy, you will need to pass additional configuration using optional keyword parameters. These optional parameters are forwarded to `urllib3`, which is the underlying library currently used by the Pinecone client to - make HTTP requests. 
You may find it helpful to refer to the - [urllib3 documentation on working with proxies](https://urllib3.readthedocs.io/en/stable/advanced-usage.html#http-and-https-proxies) - while troubleshooting these settings. - + make HTTP requests. You may find it helpful to refer to the + [urllib3 documentation on working with proxies](https://urllib3.readthedocs.io/en/stable/advanced-usage.html#http-and-https-proxies) + while troubleshooting these settings. + Here is a basic example: ```python @@ -144,8 +144,8 @@ def __init__( ### Using proxies with self-signed certificates - By default the Pinecone Python client will perform SSL certificate verification - using the CA bundle maintained by Mozilla in the [certifi](https://pypi.org/project/certifi/) package. + By default the Pinecone Python client will perform SSL certificate verification + using the CA bundle maintained by Mozilla in the [certifi](https://pypi.org/project/certifi/) package. If your proxy server is using a self-signed certificate, you will need to pass the path to the certificate in PEM format using the `ssl_ca_certs` parameter. @@ -165,7 +165,7 @@ def __init__( ### Disabling SSL verification - If you would like to disable SSL verification, you can pass the `ssl_verify` + If you would like to disable SSL verification, you can pass the `ssl_verify` parameter with a value of `False`. We do not recommend going to production with SSL verification disabled. ```python @@ -191,18 +191,21 @@ def __init__( self.config = config else: self.config = PineconeConfig.build( - api_key=api_key, + api_key=api_key, host=host, additional_headers=additional_headers, proxy_url=proxy_url, proxy_headers=proxy_headers, ssl_ca_certs=ssl_ca_certs, ssl_verify=ssl_verify, - **kwargs + **kwargs, ) if kwargs.get("openapi_config", None): - warnings.warn("Passing openapi_config is deprecated and will be removed in a future release. 
Please pass settings such as proxy_url, proxy_headers, ssl_ca_certs, and ssl_verify directly to the Pinecone constructor as keyword arguments. See the README at https://github.com/pinecone-io/pinecone-python-client for examples.", DeprecationWarning) + warnings.warn( + "Passing openapi_config is deprecated and will be removed in a future release. Please pass settings such as proxy_url, proxy_headers, ssl_ca_certs, and ssl_verify directly to the Pinecone constructor as keyword arguments. See the README at https://github.com/pinecone-io/pinecone-python-client for examples.", + DeprecationWarning, + ) self.openapi_config = ConfigBuilder.build_openapi_config(self.config, **kwargs) self.pool_threads = pool_threads @@ -215,7 +218,7 @@ def __init__( api_klass=ManageIndexesApi, config=self.config, openapi_config=self.openapi_config, - pool_threads=pool_threads + pool_threads=pool_threads, ) self.index_host_store = IndexHostStore() @@ -224,15 +227,13 @@ def __init__( self.load_plugins() def load_plugins(self): - """ @private """ + """@private""" try: # I don't expect this to ever throw, but wrapping this in a # try block just in case to make sure a bad plugin doesn't # halt client initialization. openapi_client_builder = build_plugin_setup_client( - config=self.config, - openapi_config=self.openapi_config, - pool_threads=self.pool_threads + config=self.config, openapi_config=self.openapi_config, pool_threads=self.pool_threads ) install_plugins(self, openapi_client_builder) except Exception as e: @@ -248,8 +249,8 @@ def create_index( ): """Creates a Pinecone index. - :param name: The name of the index to create. Must be unique within your project and - cannot be changed once created. Allowed characters are lowercase letters, numbers, + :param name: The name of the index to create. Must be unique within your project and + cannot be changed once created. Allowed characters are lowercase letters, numbers, and hyphens and the name may not begin or end with hyphens. 
Maximum length is 45 characters. :type name: str :param dimension: The dimension of vectors that will be inserted in the index. This should @@ -267,7 +268,7 @@ def create_index( if -1, return immediately and do not wait. Default: None ### Creating a serverless index - + ```python import os from pinecone import Pinecone, ServerlessSpec @@ -275,9 +276,9 @@ def create_index( client = Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) client.create_index( - name="my_index", - dimension=1536, - metric="cosine", + name="my_index", + dimension=1536, + metric="cosine", spec=ServerlessSpec(cloud="aws", region="us-west-2") ) ``` @@ -295,7 +296,7 @@ def create_index( dimension=1536, metric="cosine", spec=PodSpec( - environment="us-east1-gcp", + environment="us-east1-gcp", pod_type="p1.x1" ) ) @@ -305,11 +306,21 @@ def create_index( api_instance = self.index_api if isinstance(spec, dict): - api_instance.create_index(create_index_request=CreateIndexRequest(name=name, dimension=dimension, metric=metric, spec=spec)) + api_instance.create_index( + create_index_request=CreateIndexRequest(name=name, dimension=dimension, metric=metric, spec=spec) + ) elif isinstance(spec, ServerlessSpec): - api_instance.create_index(create_index_request=CreateIndexRequest(name=name, dimension=dimension, metric=metric, spec=spec.asdict())) + api_instance.create_index( + create_index_request=CreateIndexRequest( + name=name, dimension=dimension, metric=metric, spec=spec.asdict() + ) + ) elif isinstance(spec, PodSpec): - api_instance.create_index(create_index_request=CreateIndexRequest(name=name, dimension=dimension, metric=metric, spec=spec.asdict())) + api_instance.create_index( + create_index_request=CreateIndexRequest( + name=name, dimension=dimension, metric=metric, spec=spec.asdict() + ) + ) else: raise TypeError("spec must be of type dict, ServerlessSpec, or PodSpec") @@ -340,17 +351,17 @@ def delete_index(self, name: str, timeout: Optional[int] = None): """Deletes a Pinecone index. 
Deleting an index is an irreversible operation. All data in the index will be lost. - When you use this command, a request is sent to the Pinecone control plane to delete + When you use this command, a request is sent to the Pinecone control plane to delete the index, but the termination is not synchronous because resources take a few moments to - be released. - + be released. + You can check the status of the index by calling the `describe_index()` command. - With repeated polling of the describe_index command, you will see the index transition to a + With repeated polling of the describe_index command, you will see the index transition to a `Terminating` state before eventually resulting in a 404 after it has been removed. :param name: the name of the index. :type name: str - :param timeout: Number of seconds to poll status checking whether the index has been deleted. If None, + :param timeout: Number of seconds to poll status checking whether the index has been deleted. If None, wait indefinitely; if >=0, time out after this many seconds; if -1, return immediately and do not wait. Default: None :type timeout: int, optional @@ -383,11 +394,11 @@ def get_remaining(): def list_indexes(self) -> IndexList: """Lists all indexes. - - The results include a description of all indexes in your project, including the + + The results include a description of all indexes in your project, including the index name, dimension, metric, status, and spec. - :return: Returns an `IndexList` object, which is iterable and contains a + :return: Returns an `IndexList` object, which is iterable and contains a list of `IndexDescription` objects. It also has a convenience method `names()` which returns a list of index names. @@ -406,7 +417,7 @@ def list_indexes(self) -> IndexList: spec=ServerlessSpec(cloud="aws", region="us-west-2") ) ``` - + You can also use the `list_indexes()` method to iterate over all indexes in your project and get other information besides just names. 
@@ -433,15 +444,15 @@ def describe_index(self, name: str): :param name: the name of the index to describe. :return: Returns an `IndexDescription` object - which gives access to properties such as the - index name, dimension, metric, host url, status, + which gives access to properties such as the + index name, dimension, metric, host url, status, and spec. ### Getting your index host url In a real production situation, you probably want to store the host url in an environment variable so you - don't have to call describe_index and re-fetch it + don't have to call describe_index and re-fetch it every time you want to use the index. But this example shows how to get the value from the API using describe_index. @@ -451,7 +462,7 @@ def describe_index(self, name: str): client = Pinecone() description = client.describe_index("my_index") - + host = description.host print(f"Your index is hosted at {description.host}") @@ -467,14 +478,14 @@ def describe_index(self, name: str): return description def configure_index(self, name: str, replicas: Optional[int] = None, pod_type: Optional[str] = None): - """This method is used to scale configuration fields for your pod-based Pinecone index. + """This method is used to scale configuration fields for your pod-based Pinecone index. :param: name: the name of the Index :param: replicas: the desired number of replicas, lowest value is 0. :param: pod_type: the new pod_type for the index. To learn more about the available pod types, please see [Understanding Indexes](https://docs.pinecone.io/docs/indexes) - - + + ```python from pinecone import Pinecone @@ -483,7 +494,7 @@ def configure_index(self, name: str, replicas: Optional[int] = None, pod_type: O # Make a configuration change client.configure_index(name="my_index", replicas=4) - # Call describe_index to see the index status as the + # Call describe_index to see the index status as the # change is applied. 
client.describe_index("my_index") ``` @@ -496,9 +507,7 @@ def configure_index(self, name: str, replicas: Optional[int] = None, pod_type: O if replicas: config_args.update(replicas=replicas) configure_index_request = ConfigureIndexRequest( - spec=ConfigureIndexRequestSpec( - pod=ConfigureIndexRequestSpecPod(**config_args) - ) + spec=ConfigureIndexRequestSpec(pod=ConfigureIndexRequestSpecPod(**config_args)) ) api_instance.configure_index(name, configure_index_request=configure_index_request) @@ -513,7 +522,7 @@ def create_collection(self, name: str, source: str): def list_collections(self) -> CollectionList: """List all collections - + ```python from pinecone import Pinecone @@ -539,12 +548,12 @@ def delete_collection(self, name: str): :param: name: The name of the collection - Deleting a collection is an irreversible operation. All data + Deleting a collection is an irreversible operation. All data in the collection will be lost. This method tells Pinecone you would like to delete a collection, - but it takes a few moments to complete the operation. Use the - `describe_collection()` method to confirm that the collection + but it takes a few moments to complete the operation. Use the + `describe_collection()` method to confirm that the collection has been deleted. """ api_instance = self.index_api @@ -557,7 +566,7 @@ def describe_collection(self, name: str): ```python from pinecone import Pinecone - + client = Pinecone() description = client.describe_collection("my_collection") @@ -579,12 +588,12 @@ def _get_status(self, name: str): @staticmethod def from_texts(*args, **kwargs): raise AttributeError(_build_langchain_attribute_error_message("from_texts")) - + @staticmethod def from_documents(*args, **kwargs): raise AttributeError(_build_langchain_attribute_error_message("from_documents")) - def Index(self, name: str = '', host: str = '', **kwargs): + def Index(self, name: str = "", host: str = "", **kwargs): """ Target an index for data operations. 
@@ -592,7 +601,7 @@ def Index(self, name: str = '', host: str = '', **kwargs): In production situations, you want to uspert or query your data as quickly as possible. If you know in advance the host url of your index, you can - eliminate a round trip to the Pinecone control plane by specifying the + eliminate a round trip to the Pinecone control plane by specifying the host of the index. ```python @@ -601,7 +610,7 @@ def Index(self, name: str = '', host: str = '', **kwargs): api_key = os.environ.get("PINECONE_API_KEY") index_host = os.environ.get("PINECONE_INDEX_HOST") - + pc = Pinecone(api_key=api_key) index = pc.Index(host=index_host) @@ -628,11 +637,11 @@ def Index(self, name: str = '', host: str = '', **kwargs): For more casual usage, such as when you are playing and exploring with Pinecone in a notebook setting, you can also target an index by name. If you use this - approach, the client may need to perform an extra call to the Pinecone control + approach, the client may need to perform an extra call to the Pinecone control plane to get the host url on your behalf to get the index host. - The client will cache the index host for future use whenever it is seen, so you - will only incur the overhead of only one call. But this approach is not + The client will cache the index host for future use whenever it is seen, so you + will only incur the overhead of only one call. But this approach is not recommended for production usage. 
```python @@ -640,7 +649,7 @@ def Index(self, name: str = '', host: str = '', **kwargs): from pinecone import Pinecone, ServerlessSpec api_key = os.environ.get("PINECONE_API_KEY") - + pc = Pinecone(api_key=api_key) pc.create_index( name='my-index', @@ -654,16 +663,16 @@ def Index(self, name: str = '', host: str = '', **kwargs): index.query(vector=[...], top_k=10) ``` """ - if name == '' and host == '': + if name == "" and host == "": raise ValueError("Either name or host must be specified") - - pt = kwargs.pop('pool_threads', None) or self.pool_threads + + pt = kwargs.pop("pool_threads", None) or self.pool_threads api_key = self.config.api_key openapi_config = self.openapi_config - if host != '': + if host != "": # Use host url if it is provided - index_host=normalize_host(host) + index_host = normalize_host(host) else: # Otherwise, get host url from describe_index using the index name index_host = self.index_host_store.get_host(self.index_api, self.config, name) @@ -674,5 +683,5 @@ def Index(self, name: str = '', host: str = '', **kwargs): pool_threads=pt, openapi_config=openapi_config, source_tag=self.config.source_tag, - **kwargs - ) \ No newline at end of file + **kwargs, + ) diff --git a/pinecone/core/client/api/data_plane_api.py b/pinecone/core/client/api/data_plane_api.py index b4ad0548..d8c86326 100644 --- a/pinecone/core/client/api/data_plane_api.py +++ b/pinecone/core/client/api/data_plane_api.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -20,7 +19,7 @@ datetime, file_type, none_type, - validate_and_convert_types + validate_and_convert_types, ) from pinecone.core.client.model.delete_request import DeleteRequest from pinecone.core.client.model.describe_index_stats_request import DescribeIndexStatsRequest @@ -47,10 +46,7 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client - def __alt_delete( - self, - **kwargs - ): + def __alt_delete(self, 
**kwargs): """Delete vectors # noqa: E501 DEPRECATED. Use [`POST /delete`](https://docs.pinecone.io/reference/delete) instead. # noqa: E501 @@ -90,93 +86,66 @@ def __alt_delete( If the method is called asynchronously, returns the request thread. """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") return self.call_with_http_info(**kwargs) self.alt_delete = _Endpoint( settings={ - 'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/vectors/delete', - 'operation_id': 'alt_delete', - 'http_method': 'DELETE', - 'servers': None, + "response_type": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/vectors/delete", + "operation_id": "alt_delete", + "http_method": "DELETE", + "servers": None, }, params_map={ - 'all': [ - 'ids', - 'delete_all', - 'namespace', - ], - 'required': [], - 'nullable': [ + "all": [ + "ids", + "delete_all", + "namespace", ], - 'enum': [ - ], - 'validation': [ - ] + "required": [], + 
"nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { - }, - 'allowed_values': { + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "ids": ([str],), + "delete_all": (bool,), + "namespace": (str,), }, - 'openapi_types': { - 'ids': - ([str],), - 'delete_all': - (bool,), - 'namespace': - (str,), + "attribute_map": { + "ids": "ids", + "delete_all": "deleteAll", + "namespace": "namespace", }, - 'attribute_map': { - 'ids': 'ids', - 'delete_all': 'deleteAll', - 'namespace': 'namespace', + "location_map": { + "ids": "query", + "delete_all": "query", + "namespace": "query", }, - 'location_map': { - 'ids': 'query', - 'delete_all': 'query', - 'namespace': 'query', + "collection_format_map": { + "ids": "multi", }, - 'collection_format_map': { - 'ids': 'multi', - } }, headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], + "accept": ["application/json"], + "content_type": [], }, api_client=api_client, - callable=__alt_delete + callable=__alt_delete, ) - def __alt_describe_index_stats( - self, - **kwargs - ): + def __alt_describe_index_stats(self, **kwargs): """Get index stats # noqa: E501 DEPRECATED. Use [`POST /describe_index_stats`](https://docs.pinecone.io/reference/describe_index_stats) instead. # noqa: E501 @@ -213,78 +182,42 @@ def __alt_describe_index_stats( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") return self.call_with_http_info(**kwargs) self.alt_describe_index_stats = _Endpoint( settings={ - 'response_type': (DescribeIndexStatsResponse,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/describe_index_stats', - 'operation_id': 'alt_describe_index_stats', - 'http_method': 'GET', - 'servers': None, - }, - params_map={ - 'all': [ - ], - 'required': [], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] - }, + "response_type": (DescribeIndexStatsResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/describe_index_stats", + "operation_id": "alt_describe_index_stats", + "http_method": "GET", + "servers": None, + }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - }, - 'attribute_map': { - }, - 'location_map': { - }, - 'collection_format_map': { - } + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + "attribute_map": {}, + "location_map": {}, + 
"collection_format_map": {}, }, headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], + "accept": ["application/json"], + "content_type": [], }, api_client=api_client, - callable=__alt_describe_index_stats + callable=__alt_describe_index_stats, ) - def __delete( - self, - delete_request, - **kwargs - ): + def __delete(self, delete_request, **kwargs): """Delete vectors # noqa: E501 The `delete` operation deletes vectors, by id, from a single namespace. For guidance and examples, see [Delete data](https://docs.pinecone.io/docs/delete-data). # noqa: E501 @@ -323,88 +256,54 @@ def __delete( If the method is called asynchronously, returns the request thread. """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['delete_request'] = \ - delete_request + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["delete_request"] = delete_request return self.call_with_http_info(**kwargs) self.delete = _Endpoint( settings={ - 'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/vectors/delete', - 
'operation_id': 'delete', - 'http_method': 'POST', - 'servers': None, + "response_type": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/vectors/delete", + "operation_id": "delete", + "http_method": "POST", + "servers": None, }, params_map={ - 'all': [ - 'delete_request', + "all": [ + "delete_request", ], - 'required': [ - 'delete_request', + "required": [ + "delete_request", ], - 'nullable': [ - ], - 'enum': [ - ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'delete_request': - (DeleteRequest,), - }, - 'attribute_map': { + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "delete_request": (DeleteRequest,), }, - 'location_map': { - 'delete_request': 'body', + "attribute_map": {}, + "location_map": { + "delete_request": "body", }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] + "collection_format_map": {}, }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__delete + callable=__delete, ) - def __describe_index_stats( - self, - describe_index_stats_request, - **kwargs - ): + def __describe_index_stats(self, describe_index_stats_request, **kwargs): """Get index stats # noqa: E501 The `describe_index_stats` operation returns statistics about the contents of an index, including the vector count per namespace and the number of dimensions, and the index fullness. Serverless indexes scale automatically as needed, so index fullness is relevant only for pod-based indexes. For pod-based indexes, the index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://www.pinecone.io/docs/api/operation/describe_index/). 
# noqa: E501 @@ -443,88 +342,54 @@ def __describe_index_stats( If the method is called asynchronously, returns the request thread. """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['describe_index_stats_request'] = \ - describe_index_stats_request + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["describe_index_stats_request"] = describe_index_stats_request return self.call_with_http_info(**kwargs) self.describe_index_stats = _Endpoint( settings={ - 'response_type': (DescribeIndexStatsResponse,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/describe_index_stats', - 'operation_id': 'describe_index_stats', - 'http_method': 'POST', - 'servers': None, + "response_type": (DescribeIndexStatsResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/describe_index_stats", + "operation_id": "describe_index_stats", + "http_method": "POST", + "servers": None, }, params_map={ - 'all': [ - 'describe_index_stats_request', - ], - 'required': [ - 'describe_index_stats_request', + "all": [ + "describe_index_stats_request", ], - 'nullable': [ + "required": [ + "describe_index_stats_request", ], - 
'enum': [ - ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'describe_index_stats_request': - (DescribeIndexStatsRequest,), + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "describe_index_stats_request": (DescribeIndexStatsRequest,), }, - 'attribute_map': { + "attribute_map": {}, + "location_map": { + "describe_index_stats_request": "body", }, - 'location_map': { - 'describe_index_stats_request': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] + "collection_format_map": {}, }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__describe_index_stats + callable=__describe_index_stats, ) - def __fetch( - self, - ids, - **kwargs - ): + def __fetch(self, ids, **kwargs): """Fetch vectors # noqa: E501 The `fetch` operation looks up and returns vectors, by ID, from a single namespace. The returned vectors include the vector data and/or metadata. For guidance and examples, see [Fetch data](https://docs.pinecone.io/reference/fetch). # noqa: E501 @@ -564,92 +429,65 @@ def __fetch( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['ids'] = \ - ids + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["ids"] = ids return self.call_with_http_info(**kwargs) self.fetch = _Endpoint( settings={ - 'response_type': (FetchResponse,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/vectors/fetch', - 'operation_id': 'fetch', - 'http_method': 'GET', - 'servers': None, + "response_type": (FetchResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/vectors/fetch", + "operation_id": "fetch", + "http_method": "GET", + "servers": None, }, params_map={ - 'all': [ - 'ids', - 'namespace', - ], - 'required': [ - 'ids', + "all": [ + "ids", + "namespace", ], - 'nullable': [ + "required": [ + "ids", ], - 'enum': [ - ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { - }, - 'allowed_values': { + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "ids": ([str],), + "namespace": (str,), }, - 'openapi_types': { - 'ids': - ([str],), - 'namespace': - (str,), + "attribute_map": { + "ids": "ids", + "namespace": "namespace", 
}, - 'attribute_map': { - 'ids': 'ids', - 'namespace': 'namespace', + "location_map": { + "ids": "query", + "namespace": "query", }, - 'location_map': { - 'ids': 'query', - 'namespace': 'query', + "collection_format_map": { + "ids": "multi", }, - 'collection_format_map': { - 'ids': 'multi', - } }, headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], + "accept": ["application/json"], + "content_type": [], }, api_client=api_client, - callable=__fetch + callable=__fetch, ) - def __list( - self, - **kwargs - ): + def __list(self, **kwargs): """List vector IDs # noqa: E501 The `list` operation lists the IDs of vectors in a single namespace of a serverless index. An optional prefix can be passed to limit the results to IDs with a common prefix. `list` returns up to 100 IDs at a time by default in sorted order (bitwise/\"C\" collation). If the `limit` parameter is set, `list` returns up to that number of IDs instead. Whenever there are additional IDs to return, the response also includes a `pagination_token` that you can use to get the next batch of IDs. When the response does not include a `pagination_token`, there are no more IDs to return. For guidance and examples, see [Get record IDs](https://docs.pinecone.io/docs/get-record-ids). **Note:** `list` is supported only for serverless indexes. # noqa: E501 @@ -690,98 +528,68 @@ def __list( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") return self.call_with_http_info(**kwargs) self.list = _Endpoint( settings={ - 'response_type': (ListResponse,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/vectors/list', - 'operation_id': 'list', - 'http_method': 'GET', - 'servers': None, + "response_type": (ListResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/vectors/list", + "operation_id": "list", + "http_method": "GET", + "servers": None, }, params_map={ - 'all': [ - 'prefix', - 'limit', - 'pagination_token', - 'namespace', - ], - 'required': [], - 'nullable': [ + "all": [ + "prefix", + "limit", + "pagination_token", + "namespace", ], - 'enum': [ - ], - 'validation': [ - ] + "required": [], + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'prefix': - (str,), - 'limit': - (int,), - 'pagination_token': - (str,), - 'namespace': - (str,), - }, - 'attribute_map': { - 'prefix': 'prefix', - 'limit': 'limit', - 'pagination_token': 'paginationToken', - 
'namespace': 'namespace', - }, - 'location_map': { - 'prefix': 'query', - 'limit': 'query', - 'pagination_token': 'query', - 'namespace': 'query', - }, - 'collection_format_map': { - } + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "prefix": (str,), + "limit": (int,), + "pagination_token": (str,), + "namespace": (str,), + }, + "attribute_map": { + "prefix": "prefix", + "limit": "limit", + "pagination_token": "paginationToken", + "namespace": "namespace", + }, + "location_map": { + "prefix": "query", + "limit": "query", + "pagination_token": "query", + "namespace": "query", + }, + "collection_format_map": {}, }, headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], + "accept": ["application/json"], + "content_type": [], }, api_client=api_client, - callable=__list + callable=__list, ) - def __query( - self, - query_request, - **kwargs - ): + def __query(self, query_request, **kwargs): """Query vectors # noqa: E501 The `query` operation searches a namespace, using a query vector. It retrieves the ids of the most similar items in a namespace, along with their similarity scores. For guidance and examples, see [Query data](https://docs.pinecone.io/docs/query-data). # noqa: E501 @@ -820,88 +628,54 @@ def __query( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['query_request'] = \ - query_request + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["query_request"] = query_request return self.call_with_http_info(**kwargs) self.query = _Endpoint( settings={ - 'response_type': (QueryResponse,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/query', - 'operation_id': 'query', - 'http_method': 'POST', - 'servers': None, + "response_type": (QueryResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/query", + "operation_id": "query", + "http_method": "POST", + "servers": None, }, params_map={ - 'all': [ - 'query_request', - ], - 'required': [ - 'query_request', - ], - 'nullable': [ + "all": [ + "query_request", ], - 'enum': [ + "required": [ + "query_request", ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "query_request": (QueryRequest,), }, - 'allowed_values': { + "attribute_map": {}, + "location_map": { + "query_request": "body", }, - 'openapi_types': { - 
'query_request': - (QueryRequest,), - }, - 'attribute_map': { - }, - 'location_map': { - 'query_request': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] + "collection_format_map": {}, }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__query + callable=__query, ) - def __update( - self, - update_request, - **kwargs - ): + def __update(self, update_request, **kwargs): """Update a vector # noqa: E501 The `update` operation updates a vector in a namespace. If a value is included, it will overwrite the previous value. If a `set_metadata` is included, the values of the fields specified in it will be added or overwrite the previous value. For guidance and examples, see [Update data](https://docs.pinecone.io/reference/update). # noqa: E501 @@ -940,88 +714,54 @@ def __update( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['update_request'] = \ - update_request + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["update_request"] = update_request return self.call_with_http_info(**kwargs) self.update = _Endpoint( settings={ - 'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/vectors/update', - 'operation_id': 'update', - 'http_method': 'POST', - 'servers': None, + "response_type": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/vectors/update", + "operation_id": "update", + "http_method": "POST", + "servers": None, }, params_map={ - 'all': [ - 'update_request', - ], - 'required': [ - 'update_request', + "all": [ + "update_request", ], - 'nullable': [ + "required": [ + "update_request", ], - 'enum': [ - ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { - }, - 'allowed_values': { + "validations": {}, + "allowed_values": {}, + "openapi_types": { + 
"update_request": (UpdateRequest,), }, - 'openapi_types': { - 'update_request': - (UpdateRequest,), + "attribute_map": {}, + "location_map": { + "update_request": "body", }, - 'attribute_map': { - }, - 'location_map': { - 'update_request': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] + "collection_format_map": {}, }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__update + callable=__update, ) - def __upsert( - self, - upsert_request, - **kwargs - ): + def __upsert(self, upsert_request, **kwargs): """Upsert vectors # noqa: E501 The `upsert` operation writes vectors into a namespace. If a new value is upserted for an existing vector ID, it will overwrite the previous value. For guidance and examples, see [Upsert data](https://docs.pinecone.io/docs/upsert-data). # noqa: E501 @@ -1060,79 +800,49 @@ def __upsert( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['upsert_request'] = \ - upsert_request + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["upsert_request"] = upsert_request return self.call_with_http_info(**kwargs) self.upsert = _Endpoint( settings={ - 'response_type': (UpsertResponse,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/vectors/upsert', - 'operation_id': 'upsert', - 'http_method': 'POST', - 'servers': None, + "response_type": (UpsertResponse,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/vectors/upsert", + "operation_id": "upsert", + "http_method": "POST", + "servers": None, }, params_map={ - 'all': [ - 'upsert_request', - ], - 'required': [ - 'upsert_request', - ], - 'nullable': [ + "all": [ + "upsert_request", ], - 'enum': [ + "required": [ + "upsert_request", ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'upsert_request': - (UpsertRequest,), + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "upsert_request": (UpsertRequest,), }, - 
'attribute_map': { + "attribute_map": {}, + "location_map": { + "upsert_request": "body", }, - 'location_map': { - 'upsert_request': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] + "collection_format_map": {}, }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__upsert + callable=__upsert, ) diff --git a/pinecone/core/client/api/manage_indexes_api.py b/pinecone/core/client/api/manage_indexes_api.py index dc1f35b1..64f35d46 100644 --- a/pinecone/core/client/api/manage_indexes_api.py +++ b/pinecone/core/client/api/manage_indexes_api.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -20,7 +19,7 @@ datetime, file_type, none_type, - validate_and_convert_types + validate_and_convert_types, ) from pinecone.core.client.model.collection_list import CollectionList from pinecone.core.client.model.collection_model import CollectionModel @@ -44,12 +43,7 @@ def __init__(self, api_client=None): api_client = ApiClient() self.api_client = api_client - def __configure_index( - self, - index_name, - configure_index_request, - **kwargs - ): + def __configure_index(self, index_name, configure_index_request, **kwargs): """Configure an index # noqa: E501 This operation specifies the pod type and number of replicas for an index. It applies to pod-based indexes only. Serverless indexes scale automatically based on usage. # noqa: E501 @@ -89,101 +83,66 @@ def __configure_index( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['index_name'] = \ - index_name - kwargs['configure_index_request'] = \ - configure_index_request + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["index_name"] = index_name + kwargs["configure_index_request"] = configure_index_request return self.call_with_http_info(**kwargs) self.configure_index = _Endpoint( settings={ - 'response_type': (IndexModel,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/indexes/{index_name}', - 'operation_id': 'configure_index', - 'http_method': 'PATCH', - 'servers': [ + "response_type": (IndexModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}", + "operation_id": "configure_index", + "http_method": "PATCH", + "servers": [ { - 'url': "https://api.pinecone.io", - 'description': "No description provided", + "url": "https://api.pinecone.io", + "description": "No description provided", }, - ] + ], }, params_map={ - 'all': [ - 'index_name', - 'configure_index_request', - ], - 'required': [ - 'index_name', - 'configure_index_request', - ], - 'nullable': [ + "all": [ + "index_name", + 
"configure_index_request", ], - 'enum': [ + "required": [ + "index_name", + "configure_index_request", ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), + "configure_index_request": (ConfigureIndexRequest,), }, - 'allowed_values': { + "attribute_map": { + "index_name": "index_name", }, - 'openapi_types': { - 'index_name': - (str,), - 'configure_index_request': - (ConfigureIndexRequest,), + "location_map": { + "index_name": "path", + "configure_index_request": "body", }, - 'attribute_map': { - 'index_name': 'index_name', - }, - 'location_map': { - 'index_name': 'path', - 'configure_index_request': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] + "collection_format_map": {}, }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__configure_index + callable=__configure_index, ) - def __create_collection( - self, - create_collection_request, - **kwargs - ): + def __create_collection(self, create_collection_request, **kwargs): """Create a collection # noqa: E501 This operation creates a Pinecone collection. Serverless and starter indexes do not support collections. # noqa: E501 @@ -222,93 +181,59 @@ def __create_collection( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['create_collection_request'] = \ - create_collection_request + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["create_collection_request"] = create_collection_request return self.call_with_http_info(**kwargs) self.create_collection = _Endpoint( settings={ - 'response_type': (CollectionModel,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/collections', - 'operation_id': 'create_collection', - 'http_method': 'POST', - 'servers': [ + "response_type": (CollectionModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/collections", + "operation_id": "create_collection", + "http_method": "POST", + "servers": [ { - 'url': "https://api.pinecone.io", - 'description': "No description provided", + "url": "https://api.pinecone.io", + "description": "No description provided", }, - ] + ], }, params_map={ - 'all': [ - 'create_collection_request', - ], - 'required': [ - 'create_collection_request', + "all": [ + "create_collection_request", ], - 'nullable': [ + "required": [ + "create_collection_request", ], - 'enum': [ - ], - 'validation': [ - ] + 
"nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { - }, - 'allowed_values': { + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "create_collection_request": (CreateCollectionRequest,), }, - 'openapi_types': { - 'create_collection_request': - (CreateCollectionRequest,), + "attribute_map": {}, + "location_map": { + "create_collection_request": "body", }, - 'attribute_map': { - }, - 'location_map': { - 'create_collection_request': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] + "collection_format_map": {}, }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__create_collection + callable=__create_collection, ) - def __create_index( - self, - create_index_request, - **kwargs - ): + def __create_index(self, create_index_request, **kwargs): """Create an index # noqa: E501 This operation deploys a Pinecone index. This is where you specify the measure of similarity, the dimension of vectors to be stored in the index, which cloud provider you would like to deploy with, and more. For guidance and examples, see [Create an index](https://docs.pinecone.io/docs/manage-indexes#create-a-serverless-index). # noqa: E501 @@ -347,93 +272,59 @@ def __create_index( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['create_index_request'] = \ - create_index_request + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["create_index_request"] = create_index_request return self.call_with_http_info(**kwargs) self.create_index = _Endpoint( settings={ - 'response_type': (IndexModel,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/indexes', - 'operation_id': 'create_index', - 'http_method': 'POST', - 'servers': [ + "response_type": (IndexModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes", + "operation_id": "create_index", + "http_method": "POST", + "servers": [ { - 'url': "https://api.pinecone.io", - 'description': "No description provided", + "url": "https://api.pinecone.io", + "description": "No description provided", }, - ] + ], }, params_map={ - 'all': [ - 'create_index_request', - ], - 'required': [ - 'create_index_request', + "all": [ + "create_index_request", ], - 'nullable': [ + "required": [ + "create_index_request", ], - 'enum': [ - ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 
'validations': { - }, - 'allowed_values': { + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "create_index_request": (CreateIndexRequest,), }, - 'openapi_types': { - 'create_index_request': - (CreateIndexRequest,), + "attribute_map": {}, + "location_map": { + "create_index_request": "body", }, - 'attribute_map': { - }, - 'location_map': { - 'create_index_request': 'body', - }, - 'collection_format_map': { - } - }, - headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [ - 'application/json' - ] + "collection_format_map": {}, }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, api_client=api_client, - callable=__create_index + callable=__create_index, ) - def __delete_collection( - self, - collection_name, - **kwargs - ): + def __delete_collection(self, collection_name, **kwargs): """Delete a collection # noqa: E501 This operation deletes an existing collection. Serverless and starter indexes do not support collections. # noqa: E501 @@ -472,93 +363,64 @@ def __delete_collection( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['collection_name'] = \ - collection_name + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["collection_name"] = collection_name return self.call_with_http_info(**kwargs) self.delete_collection = _Endpoint( settings={ - 'response_type': (str,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/collections/{collection_name}', - 'operation_id': 'delete_collection', - 'http_method': 'DELETE', - 'servers': [ + "response_type": (str,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/collections/{collection_name}", + "operation_id": "delete_collection", + "http_method": "DELETE", + "servers": [ { - 'url': "https://api.pinecone.io", - 'description': "No description provided", + "url": "https://api.pinecone.io", + "description": "No description provided", }, - ] + ], }, params_map={ - 'all': [ - 'collection_name', - ], - 'required': [ - 'collection_name', + "all": [ + "collection_name", ], - 'nullable': [ + "required": [ + "collection_name", ], - 'enum': [ - ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 
'validations': { - }, - 'allowed_values': { + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "collection_name": (str,), }, - 'openapi_types': { - 'collection_name': - (str,), + "attribute_map": { + "collection_name": "collection_name", }, - 'attribute_map': { - 'collection_name': 'collection_name', + "location_map": { + "collection_name": "path", }, - 'location_map': { - 'collection_name': 'path', - }, - 'collection_format_map': { - } + "collection_format_map": {}, }, headers_map={ - 'accept': [ - 'text/plain', - 'application/json' - ], - 'content_type': [], + "accept": ["text/plain", "application/json"], + "content_type": [], }, api_client=api_client, - callable=__delete_collection + callable=__delete_collection, ) - def __delete_index( - self, - index_name, - **kwargs - ): + def __delete_index(self, index_name, **kwargs): """Delete an index # noqa: E501 This operation deletes an existing index. # noqa: E501 @@ -597,92 +459,64 @@ def __delete_index( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['index_name'] = \ - index_name + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["index_name"] = index_name return self.call_with_http_info(**kwargs) self.delete_index = _Endpoint( settings={ - 'response_type': None, - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/indexes/{index_name}', - 'operation_id': 'delete_index', - 'http_method': 'DELETE', - 'servers': [ + "response_type": None, + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}", + "operation_id": "delete_index", + "http_method": "DELETE", + "servers": [ { - 'url': "https://api.pinecone.io", - 'description': "No description provided", + "url": "https://api.pinecone.io", + "description": "No description provided", }, - ] + ], }, params_map={ - 'all': [ - 'index_name', - ], - 'required': [ - 'index_name', + "all": [ + "index_name", ], - 'nullable': [ + "required": [ + "index_name", ], - 'enum': [ - ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 
'index_name': - (str,), + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "index_name": (str,), }, - 'attribute_map': { - 'index_name': 'index_name', + "attribute_map": { + "index_name": "index_name", }, - 'location_map': { - 'index_name': 'path', + "location_map": { + "index_name": "path", }, - 'collection_format_map': { - } + "collection_format_map": {}, }, headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], + "accept": ["application/json"], + "content_type": [], }, api_client=api_client, - callable=__delete_index + callable=__delete_index, ) - def __describe_collection( - self, - collection_name, - **kwargs - ): + def __describe_collection(self, collection_name, **kwargs): """Describe a collection # noqa: E501 This operation gets a description of a collection. Serverless and starter indexes do not support collections. # noqa: E501 @@ -721,92 +555,64 @@ def __describe_collection( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['collection_name'] = \ - collection_name + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["collection_name"] = collection_name return self.call_with_http_info(**kwargs) self.describe_collection = _Endpoint( settings={ - 'response_type': (CollectionModel,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/collections/{collection_name}', - 'operation_id': 'describe_collection', - 'http_method': 'GET', - 'servers': [ + "response_type": (CollectionModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/collections/{collection_name}", + "operation_id": "describe_collection", + "http_method": "GET", + "servers": [ { - 'url': "https://api.pinecone.io", - 'description': "No description provided", + "url": "https://api.pinecone.io", + "description": "No description provided", }, - ] + ], }, params_map={ - 'all': [ - 'collection_name', - ], - 'required': [ - 'collection_name', - ], - 'nullable': [ + "all": [ + "collection_name", ], - 'enum': [ + "required": [ + "collection_name", ], - 'validation': [ - ] + "nullable": [], + "enum": [], + 
"validation": [], }, root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - 'collection_name': - (str,), + "validations": {}, + "allowed_values": {}, + "openapi_types": { + "collection_name": (str,), }, - 'attribute_map': { - 'collection_name': 'collection_name', + "attribute_map": { + "collection_name": "collection_name", }, - 'location_map': { - 'collection_name': 'path', + "location_map": { + "collection_name": "path", }, - 'collection_format_map': { - } + "collection_format_map": {}, }, headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], + "accept": ["application/json"], + "content_type": [], }, api_client=api_client, - callable=__describe_collection + callable=__describe_collection, ) - def __describe_index( - self, - index_name, - **kwargs - ): + def __describe_index(self, index_name, **kwargs): """Describe an index # noqa: E501 Get a description of an index. # noqa: E501 @@ -845,91 +651,64 @@ def __describe_index( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') - kwargs['index_name'] = \ - index_name + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") + kwargs["index_name"] = index_name return self.call_with_http_info(**kwargs) self.describe_index = _Endpoint( settings={ - 'response_type': (IndexModel,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/indexes/{index_name}', - 'operation_id': 'describe_index', - 'http_method': 'GET', - 'servers': [ + "response_type": (IndexModel,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes/{index_name}", + "operation_id": "describe_index", + "http_method": "GET", + "servers": [ { - 'url': "https://api.pinecone.io", - 'description': "No description provided", + "url": "https://api.pinecone.io", + "description": "No description provided", }, - ] + ], }, params_map={ - 'all': [ - 'index_name', - ], - 'required': [ - 'index_name', - ], - 'nullable': [ + "all": [ + "index_name", ], - 'enum': [ + "required": [ + "index_name", ], - 'validation': [ - ] + "nullable": [], + "enum": [], + "validation": [], }, root_map={ - 'validations': { + "validations": {}, + "allowed_values": {}, 
+ "openapi_types": { + "index_name": (str,), }, - 'allowed_values': { + "attribute_map": { + "index_name": "index_name", }, - 'openapi_types': { - 'index_name': - (str,), + "location_map": { + "index_name": "path", }, - 'attribute_map': { - 'index_name': 'index_name', - }, - 'location_map': { - 'index_name': 'path', - }, - 'collection_format_map': { - } + "collection_format_map": {}, }, headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], + "accept": ["application/json"], + "content_type": [], }, api_client=api_client, - callable=__describe_index + callable=__describe_index, ) - def __list_collections( - self, - **kwargs - ): + def __list_collections(self, **kwargs): """List collections # noqa: E501 This operation returns a list of all collections in a project. Serverless and starter indexes do not support collections. # noqa: E501 @@ -966,82 +745,47 @@ def __list_collections( If the method is called asynchronously, returns the request thread. """ - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") return self.call_with_http_info(**kwargs) 
self.list_collections = _Endpoint( settings={ - 'response_type': (CollectionList,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/collections', - 'operation_id': 'list_collections', - 'http_method': 'GET', - 'servers': [ + "response_type": (CollectionList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/collections", + "operation_id": "list_collections", + "http_method": "GET", + "servers": [ { - 'url': "https://api.pinecone.io", - 'description': "No description provided", + "url": "https://api.pinecone.io", + "description": "No description provided", }, - ] - }, - params_map={ - 'all': [ - ], - 'required': [], - 'nullable': [ - ], - 'enum': [ ], - 'validation': [ - ] }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - }, - 'attribute_map': { - }, - 'location_map': { - }, - 'collection_format_map': { - } + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + "attribute_map": {}, + "location_map": {}, + "collection_format_map": {}, }, headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], + "accept": ["application/json"], + "content_type": [], }, api_client=api_client, - callable=__list_collections + callable=__list_collections, ) - def __list_indexes( - self, - **kwargs - ): + def __list_indexes(self, **kwargs): """List indexes # noqa: E501 This operation returns a list of all indexes in a project. # noqa: E501 @@ -1078,74 +822,42 @@ def __list_indexes( If the method is called asynchronously, returns the request thread. 
""" - kwargs['async_req'] = kwargs.get( - 'async_req', False - ) - kwargs['_return_http_data_only'] = kwargs.get( - '_return_http_data_only', True - ) - kwargs['_preload_content'] = kwargs.get( - '_preload_content', True - ) - kwargs['_request_timeout'] = kwargs.get( - '_request_timeout', None - ) - kwargs['_check_input_type'] = kwargs.get( - '_check_input_type', True - ) - kwargs['_check_return_type'] = kwargs.get( - '_check_return_type', True - ) - kwargs['_host_index'] = kwargs.get('_host_index') + kwargs["async_req"] = kwargs.get("async_req", False) + kwargs["_return_http_data_only"] = kwargs.get("_return_http_data_only", True) + kwargs["_preload_content"] = kwargs.get("_preload_content", True) + kwargs["_request_timeout"] = kwargs.get("_request_timeout", None) + kwargs["_check_input_type"] = kwargs.get("_check_input_type", True) + kwargs["_check_return_type"] = kwargs.get("_check_return_type", True) + kwargs["_host_index"] = kwargs.get("_host_index") return self.call_with_http_info(**kwargs) self.list_indexes = _Endpoint( settings={ - 'response_type': (IndexList,), - 'auth': [ - 'ApiKeyAuth' - ], - 'endpoint_path': '/indexes', - 'operation_id': 'list_indexes', - 'http_method': 'GET', - 'servers': [ + "response_type": (IndexList,), + "auth": ["ApiKeyAuth"], + "endpoint_path": "/indexes", + "operation_id": "list_indexes", + "http_method": "GET", + "servers": [ { - 'url': "https://api.pinecone.io", - 'description': "No description provided", + "url": "https://api.pinecone.io", + "description": "No description provided", }, - ] - }, - params_map={ - 'all': [ - ], - 'required': [], - 'nullable': [ ], - 'enum': [ - ], - 'validation': [ - ] }, + params_map={"all": [], "required": [], "nullable": [], "enum": [], "validation": []}, root_map={ - 'validations': { - }, - 'allowed_values': { - }, - 'openapi_types': { - }, - 'attribute_map': { - }, - 'location_map': { - }, - 'collection_format_map': { - } + "validations": {}, + "allowed_values": {}, + "openapi_types": {}, + 
"attribute_map": {}, + "location_map": {}, + "collection_format_map": {}, }, headers_map={ - 'accept': [ - 'application/json' - ], - 'content_type': [], + "accept": ["application/json"], + "content_type": [], }, api_client=api_client, - callable=__list_indexes + callable=__list_indexes, ) diff --git a/pinecone/core/client/api_client.py b/pinecone/core/client/api_client.py index 6bc6a31c..5d7999e4 100644 --- a/pinecone/core/client/api_client.py +++ b/pinecone/core/client/api_client.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import json import atexit import mimetypes @@ -36,7 +35,7 @@ file_type, model_to_dict, none_type, - validate_and_convert_types + validate_and_convert_types, ) @@ -64,8 +63,7 @@ class ApiClient(object): _pool = None - def __init__(self, configuration=None, header_name=None, header_value=None, - cookie=None, pool_threads=1): + def __init__(self, configuration=None, header_name=None, header_value=None, cookie=None, pool_threads=1): if configuration is None: configuration = Configuration.get_default_copy() self.configuration = configuration @@ -77,7 +75,7 @@ def __init__(self, configuration=None, header_name=None, header_value=None, self.default_headers[header_name] = header_value self.cookie = cookie # Set default User-Agent. - self.user_agent = 'OpenAPI-Generator/1.0.0/python' + self.user_agent = "OpenAPI-Generator/1.0.0/python" def __enter__(self): return self @@ -90,13 +88,13 @@ def close(self): self._pool.close() self._pool.join() self._pool = None - if hasattr(atexit, 'unregister'): + if hasattr(atexit, "unregister"): atexit.unregister(self.close) @property def pool(self): """Create thread pool on first request - avoids instantiating unused threadpool for blocking clients. + avoids instantiating unused threadpool for blocking clients. 
""" if self._pool is None: atexit.register(self.close) @@ -106,11 +104,11 @@ def pool(self): @property def user_agent(self): """User agent for this API client""" - return self.default_headers['User-Agent'] + return self.default_headers["User-Agent"] @user_agent.setter def user_agent(self, value): - self.default_headers['User-Agent'] = value + self.default_headers["User-Agent"] = value def set_default_header(self, header_name, header_value): self.default_headers[header_name] = header_value @@ -132,57 +130,47 @@ def __call_api( _preload_content: bool = True, _request_timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None, _host: typing.Optional[str] = None, - _check_type: typing.Optional[bool] = None + _check_type: typing.Optional[bool] = None, ): - config = self.configuration # header parameters header_params = header_params or {} header_params.update(self.default_headers) if self.cookie: - header_params['Cookie'] = self.cookie + header_params["Cookie"] = self.cookie if header_params: header_params = self.sanitize_for_serialization(header_params) - header_params = dict(self.parameters_to_tuples(header_params, - collection_formats)) + header_params = dict(self.parameters_to_tuples(header_params, collection_formats)) # path parameters if path_params: path_params = self.sanitize_for_serialization(path_params) - path_params = self.parameters_to_tuples(path_params, - collection_formats) + path_params = self.parameters_to_tuples(path_params, collection_formats) for k, v in path_params: # specified safe chars, encode everything - resource_path = resource_path.replace( - '{%s}' % k, - quote(str(v), safe=config.safe_chars_for_path_param) - ) + resource_path = resource_path.replace("{%s}" % k, quote(str(v), safe=config.safe_chars_for_path_param)) # query parameters if query_params: query_params = self.sanitize_for_serialization(query_params) - query_params = self.parameters_to_tuples(query_params, - collection_formats) + query_params = 
self.parameters_to_tuples(query_params, collection_formats) # post parameters if post_params or files: post_params = post_params if post_params else [] post_params = self.sanitize_for_serialization(post_params) - post_params = self.parameters_to_tuples(post_params, - collection_formats) + post_params = self.parameters_to_tuples(post_params, collection_formats) post_params.extend(self.files_parameters(files)) - if header_params['Content-Type'].startswith("multipart"): - post_params = self.parameters_to_multipart(post_params, - (dict) ) + if header_params["Content-Type"].startswith("multipart"): + post_params = self.parameters_to_multipart(post_params, (dict)) # body if body: body = self.sanitize_for_serialization(body) # auth setting - self.update_params_for_auth(header_params, query_params, - auth_settings, resource_path, method, body) + self.update_params_for_auth(header_params, query_params, auth_settings, resource_path, method, body) # request url if _host is None: @@ -194,12 +182,17 @@ def __call_api( try: # perform request and return response response_data = self.request( - method, url, query_params=query_params, headers=header_params, - post_params=post_params, body=body, + method, + url, + query_params=query_params, + headers=header_params, + post_params=post_params, + body=body, _preload_content=_preload_content, - _request_timeout=_request_timeout) + _request_timeout=_request_timeout, + ) except PineconeApiException as e: - e.body = e.body.decode('utf-8') + e.body = e.body.decode("utf-8") raise e self.last_response = response_data @@ -207,33 +200,28 @@ def __call_api( return_data = response_data if not _preload_content: - return (return_data) + return return_data return return_data # deserialize response data if response_type: if response_type != (file_type,): encoding = "utf-8" - content_type = response_data.getheader('content-type') + content_type = response_data.getheader("content-type") if content_type is not None: match = 
re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type) if match: encoding = match.group(1) response_data.data = response_data.data.decode(encoding) - return_data = self.deserialize( - response_data, - response_type, - _check_type - ) + return_data = self.deserialize(response_data, response_type, _check_type) else: return_data = None if _return_http_data_only: - return (return_data) + return return_data else: - return (return_data, response_data.status, - response_data.getheaders()) + return (return_data, response_data.status, response_data.getheaders()) def parameters_to_multipart(self, params, collection_types): """Get parameters as list of tuples, formatting as json if value is collection_types @@ -244,15 +232,15 @@ def parameters_to_multipart(self, params, collection_types): """ new_params = [] if collection_types is None: - collection_types = (dict) + collection_types = dict for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 - if isinstance(v, collection_types): # v is instance of collection_type, formatting as application/json - v = json.dumps(v, ensure_ascii=False).encode("utf-8") - field = RequestField(k, v) - field.make_multipart(content_type="application/json; charset=utf-8") - new_params.append(field) + if isinstance(v, collection_types): # v is instance of collection_type, formatting as application/json + v = json.dumps(v, ensure_ascii=False).encode("utf-8") + field = RequestField(k, v) + field.make_multipart(content_type="application/json; charset=utf-8") + new_params.append(field) else: - new_params.append((k, v)) + new_params.append((k, v)) return new_params @classmethod @@ -270,9 +258,7 @@ def sanitize_for_serialization(cls, obj): :return: The serialized form of data. 
""" if isinstance(obj, (ModelNormal, ModelComposed)): - return { - key: cls.sanitize_for_serialization(val) for key, val in model_to_dict(obj, serialize=True).items() - } + return {key: cls.sanitize_for_serialization(val) for key, val in model_to_dict(obj, serialize=True).items()} elif isinstance(obj, io.IOBase): return cls.get_file_data_and_close_file(obj) elif isinstance(obj, (str, int, float, none_type, bool)): @@ -285,7 +271,7 @@ def sanitize_for_serialization(cls, obj): return [cls.sanitize_for_serialization(item) for item in obj] if isinstance(obj, dict): return {key: cls.sanitize_for_serialization(val) for key, val in obj.items()} - raise PineconeApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__)) + raise PineconeApiValueError("Unable to prepare type {} for serialization".format(obj.__class__.__name__)) def deserialize(self, response, response_type, _check_type): """Deserializes response into an object. @@ -311,8 +297,7 @@ def deserialize(self, response, response_type, _check_type): # save response body into a tmp file and return the instance if response_type == (file_type,): content_disposition = response.getheader("Content-Disposition") - return deserialize_file(response.data, self.configuration, - content_disposition=content_disposition) + return deserialize_file(response.data, self.configuration, content_disposition=content_disposition) # fetch data from response object try: @@ -323,12 +308,7 @@ def deserialize(self, response, response_type, _check_type): # store our data under the key of 'received_data' so users have some # context if they are deserializing a string and the data type is wrong deserialized_data = validate_and_convert_types( - received_data, - response_type, - ['received_data'], - True, - _check_type, - configuration=self.configuration + received_data, response_type, ["received_data"], True, _check_type, configuration=self.configuration ) return deserialized_data @@ -350,7 +330,7 @@ def call_api( 
_preload_content: bool = True, _request_timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None, _host: typing.Optional[str] = None, - _check_type: typing.Optional[bool] = None + _check_type: typing.Optional[bool] = None, ): """Makes the HTTP request (synchronous) and returns deserialized data. @@ -406,86 +386,127 @@ def call_api( then the method will return the response directly. """ if not async_req: - return self.__call_api(resource_path, method, - path_params, query_params, header_params, - body, post_params, files, - response_type, auth_settings, - _return_http_data_only, collection_formats, - _preload_content, _request_timeout, _host, - _check_type) - - return self.pool.apply_async(self.__call_api, (resource_path, - method, path_params, - query_params, - header_params, body, - post_params, files, - response_type, - auth_settings, - _return_http_data_only, - collection_formats, - _preload_content, - _request_timeout, - _host, _check_type)) - - def request(self, method, url, query_params=None, headers=None, - post_params=None, body=None, _preload_content=True, - _request_timeout=None): + return self.__call_api( + resource_path, + method, + path_params, + query_params, + header_params, + body, + post_params, + files, + response_type, + auth_settings, + _return_http_data_only, + collection_formats, + _preload_content, + _request_timeout, + _host, + _check_type, + ) + + return self.pool.apply_async( + self.__call_api, + ( + resource_path, + method, + path_params, + query_params, + header_params, + body, + post_params, + files, + response_type, + auth_settings, + _return_http_data_only, + collection_formats, + _preload_content, + _request_timeout, + _host, + _check_type, + ), + ) + + def request( + self, + method, + url, + query_params=None, + headers=None, + post_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): """Makes the HTTP request using RESTClient.""" if method == "GET": - return self.rest_client.GET(url, - 
query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) + return self.rest_client.GET( + url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers, + ) elif method == "HEAD": - return self.rest_client.HEAD(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) + return self.rest_client.HEAD( + url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers, + ) elif method == "OPTIONS": - return self.rest_client.OPTIONS(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return self.rest_client.OPTIONS( + url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) elif method == "POST": - return self.rest_client.POST(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return self.rest_client.POST( + url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) elif method == "PUT": - return self.rest_client.PUT(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return self.rest_client.PUT( + url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) elif method == "PATCH": - return self.rest_client.PATCH(url, - 
query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return self.rest_client.PATCH( + url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) elif method == "DELETE": - return self.rest_client.DELETE(url, - query_params=query_params, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + return self.rest_client.DELETE( + url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) else: raise PineconeApiValueError( - "http method must be `GET`, `HEAD`, `OPTIONS`," - " `POST`, `PATCH`, `PUT` or `DELETE`." + "http method must be `GET`, `HEAD`, `OPTIONS`," " `POST`, `PATCH`, `PUT` or `DELETE`." ) def parameters_to_tuples(self, params, collection_formats): @@ -501,19 +522,18 @@ def parameters_to_tuples(self, params, collection_formats): for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 if k in collection_formats: collection_format = collection_formats[k] - if collection_format == 'multi': + if collection_format == "multi": new_params.extend((k, value) for value in v) else: - if collection_format == 'ssv': - delimiter = ' ' - elif collection_format == 'tsv': - delimiter = '\t' - elif collection_format == 'pipes': - delimiter = '|' + if collection_format == "ssv": + delimiter = " " + elif collection_format == "tsv": + delimiter = "\t" + elif collection_format == "pipes": + delimiter = "|" else: # csv is the default - delimiter = ',' - new_params.append( - (k, delimiter.join(str(value) for value in v))) + delimiter = "," + new_params.append((k, delimiter.join(str(value) for value in v))) else: new_params.append((k, v)) return new_params @@ -545,15 +565,12 @@ def files_parameters(self, 
files: typing.Optional[typing.Dict[str, typing.List[i continue if file_instance.closed is True: raise PineconeApiValueError( - "Cannot read a closed file. The passed in file_type " - "for %s must be open." % param_name + "Cannot read a closed file. The passed in file_type " "for %s must be open." % param_name ) filename = os.path.basename(file_instance.name) filedata = self.get_file_data_and_close_file(file_instance) - mimetype = (mimetypes.guess_type(filename)[0] or - 'application/octet-stream') - params.append( - tuple([param_name, tuple([filename, filedata, mimetype])])) + mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream" + params.append(tuple([param_name, tuple([filename, filedata, mimetype])])) return params @@ -568,10 +585,10 @@ def select_header_accept(self, accepts): accepts = [x.lower() for x in accepts] - if 'application/json' in accepts: - return 'application/json' + if "application/json" in accepts: + return "application/json" else: - return ', '.join(accepts) + return ", ".join(accepts) def select_header_content_type(self, content_types): """Returns `Content-Type` based on an array of content_types provided. @@ -580,17 +597,16 @@ def select_header_content_type(self, content_types): :return: Content-Type (e.g. application/json). """ if not content_types: - return 'application/json' + return "application/json" content_types = [x.lower() for x in content_types] - if 'application/json' in content_types or '*/*' in content_types: - return 'application/json' + if "application/json" in content_types or "*/*" in content_types: + return "application/json" else: return content_types[0] - def update_params_for_auth(self, headers, querys, auth_settings, - resource_path, method, body): + def update_params_for_auth(self, headers, querys, auth_settings, resource_path, method, body): """Updates header and query params based on authentication setting. :param headers: Header parameters dict to be updated. 
@@ -607,22 +623,19 @@ def update_params_for_auth(self, headers, querys, auth_settings, for auth in auth_settings: auth_setting = self.configuration.auth_settings().get(auth) if auth_setting: - if auth_setting['in'] == 'cookie': - headers['Cookie'] = auth_setting['value'] - elif auth_setting['in'] == 'header': - if auth_setting['type'] != 'http-signature': - headers[auth_setting['key']] = auth_setting['value'] - elif auth_setting['in'] == 'query': - querys.append((auth_setting['key'], auth_setting['value'])) + if auth_setting["in"] == "cookie": + headers["Cookie"] = auth_setting["value"] + elif auth_setting["in"] == "header": + if auth_setting["type"] != "http-signature": + headers[auth_setting["key"]] = auth_setting["value"] + elif auth_setting["in"] == "query": + querys.append((auth_setting["key"], auth_setting["value"])) else: - raise PineconeApiValueError( - 'Authentication token must be in `query` or `header`' - ) + raise PineconeApiValueError("Authentication token must be in `query` or `header`") class Endpoint(object): - def __init__(self, settings=None, params_map=None, root_map=None, - headers_map=None, api_client=None, callable=None): + def __init__(self, settings=None, params_map=None, root_map=None, headers_map=None, api_client=None, callable=None): """Creates an endpoint Args: @@ -658,55 +671,50 @@ def __init__(self, settings=None, params_map=None, root_map=None, """ self.settings = settings self.params_map = params_map - self.params_map['all'].extend([ - 'async_req', - '_host_index', - '_preload_content', - '_request_timeout', - '_return_http_data_only', - '_check_input_type', - '_check_return_type' - ]) - self.params_map['nullable'].extend(['_request_timeout']) - self.validations = root_map['validations'] - self.allowed_values = root_map['allowed_values'] - self.openapi_types = root_map['openapi_types'] + self.params_map["all"].extend( + [ + "async_req", + "_host_index", + "_preload_content", + "_request_timeout", + "_return_http_data_only", + 
"_check_input_type", + "_check_return_type", + ] + ) + self.params_map["nullable"].extend(["_request_timeout"]) + self.validations = root_map["validations"] + self.allowed_values = root_map["allowed_values"] + self.openapi_types = root_map["openapi_types"] extra_types = { - 'async_req': (bool,), - '_host_index': (none_type, int), - '_preload_content': (bool,), - '_request_timeout': (none_type, float, (float,), [float], int, (int,), [int]), - '_return_http_data_only': (bool,), - '_check_input_type': (bool,), - '_check_return_type': (bool,) + "async_req": (bool,), + "_host_index": (none_type, int), + "_preload_content": (bool,), + "_request_timeout": (none_type, float, (float,), [float], int, (int,), [int]), + "_return_http_data_only": (bool,), + "_check_input_type": (bool,), + "_check_return_type": (bool,), } self.openapi_types.update(extra_types) - self.attribute_map = root_map['attribute_map'] - self.location_map = root_map['location_map'] - self.collection_format_map = root_map['collection_format_map'] + self.attribute_map = root_map["attribute_map"] + self.location_map = root_map["location_map"] + self.collection_format_map = root_map["collection_format_map"] self.headers_map = headers_map self.api_client = api_client self.callable = callable def __validate_inputs(self, kwargs): - for param in self.params_map['enum']: + for param in self.params_map["enum"]: if param in kwargs: - check_allowed_values( - self.allowed_values, - (param,), - kwargs[param] - ) + check_allowed_values(self.allowed_values, (param,), kwargs[param]) - for param in self.params_map['validation']: + for param in self.params_map["validation"]: if param in kwargs: check_validations( - self.validations, - (param,), - kwargs[param], - configuration=self.api_client.configuration + self.validations, (param,), kwargs[param], configuration=self.api_client.configuration ) - if kwargs['_check_input_type'] is False: + if kwargs["_check_input_type"] is False: return for key, value in kwargs.items(): @@ 
-715,51 +723,41 @@ def __validate_inputs(self, kwargs): self.openapi_types[key], [key], False, - kwargs['_check_input_type'], - configuration=self.api_client.configuration + kwargs["_check_input_type"], + configuration=self.api_client.configuration, ) kwargs[key] = fixed_val def __gather_params(self, kwargs): - params = { - 'body': None, - 'collection_format': {}, - 'file': {}, - 'form': [], - 'header': {}, - 'path': {}, - 'query': [] - } + params = {"body": None, "collection_format": {}, "file": {}, "form": [], "header": {}, "path": {}, "query": []} for param_name, param_value in kwargs.items(): param_location = self.location_map.get(param_name) if param_location is None: continue if param_location: - if param_location == 'body': - params['body'] = param_value + if param_location == "body": + params["body"] = param_value continue base_name = self.attribute_map[param_name] - if (param_location == 'form' and - self.openapi_types[param_name] == (file_type,)): - params['file'][param_name] = [param_value] - elif (param_location == 'form' and - self.openapi_types[param_name] == ([file_type],)): + if param_location == "form" and self.openapi_types[param_name] == (file_type,): + params["file"][param_name] = [param_value] + elif param_location == "form" and self.openapi_types[param_name] == ([file_type],): # param_value is already a list - params['file'][param_name] = param_value - elif param_location in {'form', 'query'}: + params["file"][param_name] = param_value + elif param_location in {"form", "query"}: param_value_full = (base_name, param_value) params[param_location].append(param_value_full) - if param_location not in {'form', 'query'}: + if param_location not in {"form", "query"}: params[param_location][base_name] = param_value collection_format = self.collection_format_map.get(param_name) if collection_format: - params['collection_format'][base_name] = collection_format + params["collection_format"][base_name] = collection_format return params def __call__(self, 
*args, **kwargs): - """ This method is invoked when endpoints are called + """This method is invoked when endpoints are called Example: api_instance = DataPlaneApi() @@ -772,79 +770,76 @@ def __call__(self, *args, **kwargs): return self.callable(self, *args, **kwargs) def call_with_http_info(self, **kwargs): - try: - index = self.api_client.configuration.server_operation_index.get( - self.settings['operation_id'], self.api_client.configuration.server_index - ) if kwargs['_host_index'] is None else kwargs['_host_index'] + index = ( + self.api_client.configuration.server_operation_index.get( + self.settings["operation_id"], self.api_client.configuration.server_index + ) + if kwargs["_host_index"] is None + else kwargs["_host_index"] + ) server_variables = self.api_client.configuration.server_operation_variables.get( - self.settings['operation_id'], self.api_client.configuration.server_variables + self.settings["operation_id"], self.api_client.configuration.server_variables ) _host = self.api_client.configuration.get_host_from_settings( - index, variables=server_variables, servers=self.settings['servers'] + index, variables=server_variables, servers=self.settings["servers"] ) except IndexError: - if self.settings['servers']: + if self.settings["servers"]: raise PineconeApiValueError( - "Invalid host index. Must be 0 <= index < %s" % - len(self.settings['servers']) + "Invalid host index. 
Must be 0 <= index < %s" % len(self.settings["servers"]) ) _host = None for key, value in kwargs.items(): - if key not in self.params_map['all']: + if key not in self.params_map["all"]: raise PineconeApiTypeError( - "Got an unexpected parameter '%s'" - " to method `%s`" % - (key, self.settings['operation_id']) + "Got an unexpected parameter '%s'" " to method `%s`" % (key, self.settings["operation_id"]) ) # only throw this nullable PineconeApiValueError if _check_input_type # is False, if _check_input_type==True we catch this case # in self.__validate_inputs - if (key not in self.params_map['nullable'] and value is None - and kwargs['_check_input_type'] is False): + if key not in self.params_map["nullable"] and value is None and kwargs["_check_input_type"] is False: raise PineconeApiValueError( "Value may not be None for non-nullable parameter `%s`" - " when calling `%s`" % - (key, self.settings['operation_id']) + " when calling `%s`" % (key, self.settings["operation_id"]) ) - for key in self.params_map['required']: + for key in self.params_map["required"]: if key not in kwargs.keys(): raise PineconeApiValueError( - "Missing the required parameter `%s` when calling " - "`%s`" % (key, self.settings['operation_id']) + "Missing the required parameter `%s` when calling " "`%s`" % (key, self.settings["operation_id"]) ) self.__validate_inputs(kwargs) params = self.__gather_params(kwargs) - accept_headers_list = self.headers_map['accept'] + accept_headers_list = self.headers_map["accept"] if accept_headers_list: - params['header']['Accept'] = self.api_client.select_header_accept( - accept_headers_list) + params["header"]["Accept"] = self.api_client.select_header_accept(accept_headers_list) - content_type_headers_list = self.headers_map['content_type'] + content_type_headers_list = self.headers_map["content_type"] if content_type_headers_list: - header_list = self.api_client.select_header_content_type( - content_type_headers_list) - params['header']['Content-Type'] = 
header_list + header_list = self.api_client.select_header_content_type(content_type_headers_list) + params["header"]["Content-Type"] = header_list return self.api_client.call_api( - self.settings['endpoint_path'], self.settings['http_method'], - params['path'], - params['query'], - params['header'], - body=params['body'], - post_params=params['form'], - files=params['file'], - response_type=self.settings['response_type'], - auth_settings=self.settings['auth'], - async_req=kwargs['async_req'], - _check_type=kwargs['_check_return_type'], - _return_http_data_only=kwargs['_return_http_data_only'], - _preload_content=kwargs['_preload_content'], - _request_timeout=kwargs['_request_timeout'], + self.settings["endpoint_path"], + self.settings["http_method"], + params["path"], + params["query"], + params["header"], + body=params["body"], + post_params=params["form"], + files=params["file"], + response_type=self.settings["response_type"], + auth_settings=self.settings["auth"], + async_req=kwargs["async_req"], + _check_type=kwargs["_check_return_type"], + _return_http_data_only=kwargs["_return_http_data_only"], + _preload_content=kwargs["_preload_content"], + _request_timeout=kwargs["_request_timeout"], _host=_host, - collection_formats=params['collection_format']) + collection_formats=params["collection_format"], + ) diff --git a/pinecone/core/client/apis/__init__.py b/pinecone/core/client/apis/__init__.py index fa03b412..39c5562e 100644 --- a/pinecone/core/client/apis/__init__.py +++ b/pinecone/core/client/apis/__init__.py @@ -1,4 +1,3 @@ - # flake8: noqa # Import all APIs into this package. 
diff --git a/pinecone/core/client/configuration.py b/pinecone/core/client/configuration.py index 2e7b2140..9ecbec75 100644 --- a/pinecone/core/client/configuration.py +++ b/pinecone/core/client/configuration.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import copy import logging import multiprocessing @@ -20,99 +19,112 @@ JSON_SCHEMA_VALIDATION_KEYWORDS = { - 'multipleOf', 'maximum', 'exclusiveMaximum', - 'minimum', 'exclusiveMinimum', 'maxLength', - 'minLength', 'pattern', 'maxItems', 'minItems' + "multipleOf", + "maximum", + "exclusiveMaximum", + "minimum", + "exclusiveMinimum", + "maxLength", + "minLength", + "pattern", + "maxItems", + "minItems", } + class Configuration(object): """NOTE: This class is auto generated by OpenAPI Generator - Ref: https://openapi-generator.tech - Do not edit the class manually. - - :param host: Base url - :param api_key: Dict to store API key(s). - Each entry in the dict specifies an API key. - The dict key is the name of the security scheme in the OAS specification. - The dict value is the API key secret. - :param api_key_prefix: Dict to store API prefix (e.g. Bearer) - The dict key is the name of the security scheme in the OAS specification. - The dict value is an API key prefix when generating the auth data. - :param username: Username for HTTP basic authentication - :param password: Password for HTTP basic authentication - :param discard_unknown_keys: Boolean value indicating whether to discard - unknown properties. A server may send a response that includes additional - properties that are not known by the client in the following scenarios: - 1. The OpenAPI document is incomplete, i.e. it does not match the server - implementation. - 2. The client was generated using an older version of the OpenAPI document - and the server has been upgraded since then. 
- If a schema in the OpenAPI document defines the additionalProperties attribute, - then all undeclared properties received by the server are injected into the - additional properties map. In that case, there are undeclared properties, and - nothing to discard. - :param disabled_client_side_validations (string): Comma-separated list of - JSON schema validation keywords to disable JSON schema structural validation - rules. The following keywords may be specified: multipleOf, maximum, - exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern, - maxItems, minItems. - By default, the validation is performed for data generated locally by the client - and data received from the server, independent of any validation performed by - the server side. If the input data does not satisfy the JSON schema validation - rules specified in the OpenAPI document, an exception is raised. - If disabled_client_side_validations is set, structural validation is - disabled. This can be useful to troubleshoot data validation problem, such as - when the OpenAPI document validation rules do not match the actual API data - received by the server. - :param server_index: Index to servers configuration. - :param server_variables: Mapping with string values to replace variables in - templated server configuration. The validation of enums is performed for - variables with defined enum values before. - :param server_operation_index: Mapping from operation ID to an index to server - configuration. - :param server_operation_variables: Mapping from operation ID to a mapping with - string values to replace variables in templated server configuration. - The validation of enums is performed for variables with defined enum values before. - :param ssl_ca_cert: str - the path to a file of concatenated CA certificates - in PEM format - - :Example: - - API Key Authentication Example. 
- Given the following security scheme in the OpenAPI specification: - components: - securitySchemes: - cookieAuth: # name for the security scheme - type: apiKey - in: cookie - name: JSESSIONID # cookie name - - You can programmatically set the cookie: - -conf = pinecone.core.client.Configuration( - api_key={'cookieAuth': 'abc123'} - api_key_prefix={'cookieAuth': 'JSESSIONID'} -) - - The following cookie will be added to the HTTP request: - Cookie: JSESSIONID abc123 + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param host: Base url + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. Bearer) + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. + :param username: Username for HTTP basic authentication + :param password: Password for HTTP basic authentication + :param discard_unknown_keys: Boolean value indicating whether to discard + unknown properties. A server may send a response that includes additional + properties that are not known by the client in the following scenarios: + 1. The OpenAPI document is incomplete, i.e. it does not match the server + implementation. + 2. The client was generated using an older version of the OpenAPI document + and the server has been upgraded since then. + If a schema in the OpenAPI document defines the additionalProperties attribute, + then all undeclared properties received by the server are injected into the + additional properties map. In that case, there are undeclared properties, and + nothing to discard. + :param disabled_client_side_validations (string): Comma-separated list of + JSON schema validation keywords to disable JSON schema structural validation + rules. 
The following keywords may be specified: multipleOf, maximum, + exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern, + maxItems, minItems. + By default, the validation is performed for data generated locally by the client + and data received from the server, independent of any validation performed by + the server side. If the input data does not satisfy the JSON schema validation + rules specified in the OpenAPI document, an exception is raised. + If disabled_client_side_validations is set, structural validation is + disabled. This can be useful to troubleshoot data validation problem, such as + when the OpenAPI document validation rules do not match the actual API data + received by the server. + :param server_index: Index to servers configuration. + :param server_variables: Mapping with string values to replace variables in + templated server configuration. The validation of enums is performed for + variables with defined enum values before. + :param server_operation_index: Mapping from operation ID to an index to server + configuration. + :param server_operation_variables: Mapping from operation ID to a mapping with + string values to replace variables in templated server configuration. + The validation of enums is performed for variables with defined enum values before. + :param ssl_ca_cert: str - the path to a file of concatenated CA certificates + in PEM format + + :Example: + + API Key Authentication Example. 
+ Given the following security scheme in the OpenAPI specification: + components: + securitySchemes: + cookieAuth: # name for the security scheme + type: apiKey + in: cookie + name: JSESSIONID # cookie name + + You can programmatically set the cookie: + + conf = pinecone.core.client.Configuration( + api_key={'cookieAuth': 'abc123'} + api_key_prefix={'cookieAuth': 'JSESSIONID'} + ) + + The following cookie will be added to the HTTP request: + Cookie: JSESSIONID abc123 """ _default = None - def __init__(self, host=None, - api_key=None, api_key_prefix=None, - access_token=None, - username=None, password=None, - discard_unknown_keys=False, - disabled_client_side_validations="", - server_index=None, server_variables=None, - server_operation_index=None, server_operation_variables=None, - ssl_ca_cert=None, - ): - """Constructor - """ + def __init__( + self, + host=None, + api_key=None, + api_key_prefix=None, + access_token=None, + username=None, + password=None, + discard_unknown_keys=False, + disabled_client_side_validations="", + server_index=None, + server_variables=None, + server_operation_index=None, + server_operation_variables=None, + ssl_ca_cert=None, + ): + """Constructor""" self._base_path = "https://api.pinecone.io" if host is None else host """Default Base url """ @@ -155,7 +167,7 @@ def __init__(self, host=None, """ self.logger["package_logger"] = logging.getLogger("pinecone.core.client") self.logger["urllib3_logger"] = logging.getLogger("urllib3") - self.logger_format = '%(asctime)s %(levelname)s %(message)s' + self.logger_format = "%(asctime)s %(levelname)s %(message)s" """Log format """ self.logger_stream_handler = None @@ -203,7 +215,7 @@ def __init__(self, host=None, self.proxy_headers = None """Proxy headers """ - self.safe_chars_for_path_param = '' + self.safe_chars_for_path_param = "" """Safe chars for path_param """ self.retries = None @@ -220,7 +232,7 @@ def __deepcopy__(self, memo): result = cls.__new__(cls) memo[id(self)] = result for k, v in 
self.__dict__.items(): - if k not in ('logger', 'logger_file_handler'): + if k not in ("logger", "logger_file_handler"): setattr(result, k, copy.deepcopy(v, memo)) # shallow copy of loggers result.logger = copy.copy(self.logger) @@ -231,12 +243,11 @@ def __deepcopy__(self, memo): def __setattr__(self, name, value): object.__setattr__(self, name, value) - if name == 'disabled_client_side_validations': - s = set(filter(None, value.split(','))) + if name == "disabled_client_side_validations": + s = set(filter(None, value.split(","))) for v in s: if v not in JSON_SCHEMA_VALIDATION_KEYWORDS: - raise PineconeApiValueError( - "Invalid keyword: '{0}''".format(v)) + raise PineconeApiValueError("Invalid keyword: '{0}''".format(v)) self._disabled_client_side_validations = s @classmethod @@ -377,9 +388,7 @@ def get_basic_auth_token(self): password = "" if self.password is not None: password = self.password - return urllib3.util.make_headers( - basic_auth=username + ':' + password - ).get('authorization') + return urllib3.util.make_headers(basic_auth=username + ":" + password).get("authorization") def auth_settings(self): """Gets Auth Settings dict for api client. @@ -387,13 +396,13 @@ def auth_settings(self): :return: The Auth Settings information dict. """ auth = {} - if 'ApiKeyAuth' in self.api_key: - auth['ApiKeyAuth'] = { - 'type': 'api_key', - 'in': 'header', - 'key': 'Api-Key', - 'value': self.get_api_key_with_prefix( - 'ApiKeyAuth', + if "ApiKeyAuth" in self.api_key: + auth["ApiKeyAuth"] = { + "type": "api_key", + "in": "header", + "key": "Api-Key", + "value": self.get_api_key_with_prefix( + "ApiKeyAuth", ), } return auth @@ -403,12 +412,13 @@ def to_debug_report(self): :return: The report for debugging. 
""" - return "Python SDK Debug Report:\n"\ - "OS: {env}\n"\ - "Python Version: {pyversion}\n"\ - "Version of the API: v1\n"\ - "SDK Package Version: 1.0.0".\ - format(env=sys.platform, pyversion=sys.version) + return ( + "Python SDK Debug Report:\n" + "OS: {env}\n" + "Python Version: {pyversion}\n" + "Version of the API: v1\n" + "SDK Package Version: 1.0.0".format(env=sys.platform, pyversion=sys.version) + ) def get_host_settings(self): """Gets an array of host settings @@ -417,8 +427,8 @@ def get_host_settings(self): """ return [ { - 'url': "https://api.pinecone.io", - 'description': "Production API endpoints; unversioned legacy.", + "url": "https://api.pinecone.io", + "description": "Production API endpoints; unversioned legacy.", } ] @@ -440,22 +450,20 @@ def get_host_from_settings(self, index, variables=None, servers=None): except IndexError: raise ValueError( "Invalid index {0} when selecting the host settings. " - "Must be less than {1}".format(index, len(servers))) + "Must be less than {1}".format(index, len(servers)) + ) - url = server['url'] + url = server["url"] # go through variables and replace placeholders - for variable_name, variable in server.get('variables', {}).items(): - used_value = variables.get( - variable_name, variable['default_value']) + for variable_name, variable in server.get("variables", {}).items(): + used_value = variables.get(variable_name, variable["default_value"]) - if 'enum_values' in variable \ - and used_value not in variable['enum_values']: + if "enum_values" in variable and used_value not in variable["enum_values"]: raise ValueError( "The variable `{0}` in the host URL has invalid value " - "{1}. Must be {2}.".format( - variable_name, variables[variable_name], - variable['enum_values'])) + "{1}. 
Must be {2}.".format(variable_name, variables[variable_name], variable["enum_values"]) + ) url = url.replace("{" + variable_name + "}", used_value) diff --git a/pinecone/core/client/exceptions.py b/pinecone/core/client/exceptions.py index dc40885c..0cabf808 100644 --- a/pinecone/core/client/exceptions.py +++ b/pinecone/core/client/exceptions.py @@ -9,15 +9,13 @@ """ - class PineconeException(Exception): """The base exception class for all exceptions in the Pinecone Python SDK""" class PineconeApiTypeError(PineconeException, TypeError): - def __init__(self, msg, path_to_item=None, valid_classes=None, - key_type=None): - """ Raises an exception for TypeErrors + def __init__(self, msg, path_to_item=None, valid_classes=None, key_type=None): + """Raises an exception for TypeErrors Args: msg (str): the exception message @@ -98,7 +96,6 @@ def __init__(self, msg, path_to_item=None): class PineconeApiException(PineconeException): - def __init__(self, status=None, reason=None, http_resp=None): if http_resp: self.status = http_resp.status @@ -113,11 +110,9 @@ def __init__(self, status=None, reason=None, http_resp=None): def __str__(self): """Custom error messages for exception""" - error_message = "({0})\n"\ - "Reason: {1}\n".format(self.status, self.reason) + error_message = "({0})\n" "Reason: {1}\n".format(self.status, self.reason) if self.headers: - error_message += "HTTP response headers: {0}\n".format( - self.headers) + error_message += "HTTP response headers: {0}\n".format(self.headers) if self.body: error_message += "HTTP response body: {0}\n".format(self.body) @@ -126,25 +121,21 @@ def __str__(self): class NotFoundException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None): super(NotFoundException, self).__init__(status, reason, http_resp) class UnauthorizedException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None): super(UnauthorizedException, self).__init__(status, reason, http_resp) class 
ForbiddenException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None): super(ForbiddenException, self).__init__(status, reason, http_resp) class ServiceException(PineconeApiException): - def __init__(self, status=None, reason=None, http_resp=None): super(ServiceException, self).__init__(status, reason, http_resp) diff --git a/pinecone/core/client/model/collection_list.py b/pinecone/core/client/model/collection_list.py index 18b4e8e3..58078999 100644 --- a/pinecone/core/client/model/collection_list.py +++ b/pinecone/core/client/model/collection_list.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.collection_model import CollectionModel - globals()['CollectionModel'] = CollectionModel + + globals()["CollectionModel"] = CollectionModel class CollectionList(ModelNormal): @@ -59,11 +59,9 @@ class CollectionList(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -72,7 +70,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -88,20 +96,18 @@ def openapi_types(): """ lazy_import() return { - 'collections': ([CollectionModel],), # noqa: E501 + "collections": ([CollectionModel],), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'collections': 'collections', # noqa: E501 + "collections": "collections", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -144,17 +150,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 collections ([CollectionModel]): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -170,23 +177,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -226,15 +237,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 collections ([CollectionModel]): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -250,13 +262,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/collection_model.py b/pinecone/core/client/model/collection_model.py index d1ad1b75..b461d832 100644 --- a/pinecone/core/client/model/collection_model.py +++ b/pinecone/core/client/model/collection_model.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class CollectionModel(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -56,17 +54,17 @@ class CollectionModel(ModelNormal): """ allowed_values = { - ('status',): { - 'INITIALIZING': "Initializing", - 'READY': "Ready", - 'TERMINATING': "Terminating", + ("status",): { + "INITIALIZING": "Initializing", + "READY": "Ready", + "TERMINATING": "Terminating", }, } validations = { - ('dimension',): { - 'inclusive_maximum': 2000, - 'inclusive_minimum': 1, + ("dimension",): { + "inclusive_maximum": 2000, + "inclusive_minimum": 1, }, } @@ -76,7 +74,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -91,30 +99,28 @@ def openapi_types(): and the value is attribute type. """ return { - 'name': (str,), # noqa: E501 - 'status': (str,), # noqa: E501 - 'environment': (str,), # noqa: E501 - 'size': (int,), # noqa: E501 - 'dimension': (int,), # noqa: E501 - 'vector_count': (int,), # noqa: E501 + "name": (str,), # noqa: E501 + "status": (str,), # noqa: E501 + "environment": (str,), # noqa: E501 + "size": (int,), # noqa: E501 + "dimension": (int,), # noqa: E501 + "vector_count": (int,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'name': 'name', # noqa: E501 - 'status': 'status', # noqa: E501 - 'environment': 'environment', # noqa: E501 - 'size': 'size', # noqa: E501 - 'dimension': 'dimension', # noqa: E501 - 'vector_count': 'vector_count', # noqa: E501 + "name": "name", # noqa: E501 + "status": "status", # noqa: E501 + "environment": "environment", # noqa: E501 + "size": "size", # noqa: E501 + "dimension": "dimension", # noqa: E501 + "vector_count": "vector_count", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ 
-164,17 +170,18 @@ def _from_openapi_data(cls, name, status, environment, *args, **kwargs): # noqa vector_count (int): The number of records stored in the collection.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -193,23 +200,27 @@ def _from_openapi_data(cls, name, status, environment, *args, **kwargs): # noqa self.status = status self.environment = environment for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, name, status, environment, *args, **kwargs): # noqa: E501 @@ -256,15 +267,16 @@ def __init__(self, name, status, environment, *args, **kwargs): # noqa: E501 vector_count (int): The number of records stored in the collection.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -283,13 +295,17 @@ def __init__(self, name, status, environment, *args, **kwargs): # noqa: E501 self.status = status self.environment = environment for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/configure_index_request.py b/pinecone/core/client/model/configure_index_request.py index 12ca8e23..cf3eea58 100644 --- a/pinecone/core/client/model/configure_index_request.py +++ b/pinecone/core/client/model/configure_index_request.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.configure_index_request_spec import ConfigureIndexRequestSpec - globals()['ConfigureIndexRequestSpec'] = ConfigureIndexRequestSpec + + globals()["ConfigureIndexRequestSpec"] = ConfigureIndexRequestSpec class ConfigureIndexRequest(ModelNormal): @@ -59,11 +59,9 @@ class ConfigureIndexRequest(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -72,7 +70,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -88,20 +96,18 @@ def openapi_types(): """ lazy_import() return { - 'spec': (ConfigureIndexRequestSpec,), # noqa: E501 + "spec": (ConfigureIndexRequestSpec,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'spec': 'spec', # noqa: E501 + "spec": "spec", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -146,17 +152,18 @@ def _from_openapi_data(cls, spec, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -173,23 +180,27 @@ def _from_openapi_data(cls, spec, *args, **kwargs): # noqa: E501 self.spec = spec for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, spec, *args, **kwargs): # noqa: E501 @@ -231,15 +242,16 @@ def __init__(self, spec, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -256,13 +268,17 @@ def __init__(self, spec, *args, **kwargs): # noqa: E501 self.spec = spec for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/configure_index_request_spec.py b/pinecone/core/client/model/configure_index_request_spec.py index ab49b779..e9285a60 100644 --- a/pinecone/core/client/model/configure_index_request_spec.py +++ b/pinecone/core/client/model/configure_index_request_spec.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.configure_index_request_spec_pod import ConfigureIndexRequestSpecPod - globals()['ConfigureIndexRequestSpecPod'] = ConfigureIndexRequestSpecPod + + globals()["ConfigureIndexRequestSpecPod"] = ConfigureIndexRequestSpecPod class ConfigureIndexRequestSpec(ModelNormal): @@ -59,11 +59,9 @@ class ConfigureIndexRequestSpec(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -72,7 +70,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -88,20 +96,18 @@ def openapi_types(): """ lazy_import() return { - 'pod': (ConfigureIndexRequestSpecPod,), # noqa: E501 + "pod": (ConfigureIndexRequestSpecPod,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'pod': 'pod', # noqa: E501 + "pod": "pod", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -146,17 +152,18 @@ def _from_openapi_data(cls, pod, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -173,23 +180,27 @@ def _from_openapi_data(cls, pod, *args, **kwargs): # noqa: E501 self.pod = pod for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, pod, *args, **kwargs): # noqa: E501 @@ -231,15 +242,16 @@ def __init__(self, pod, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -256,13 +268,17 @@ def __init__(self, pod, *args, **kwargs): # noqa: E501 self.pod = pod for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/configure_index_request_spec_pod.py b/pinecone/core/client/model/configure_index_request_spec_pod.py index 8e92a4c2..c0774a18 100644 --- a/pinecone/core/client/model/configure_index_request_spec_pod.py +++ b/pinecone/core/client/model/configure_index_request_spec_pod.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class ConfigureIndexRequestSpecPod(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,12 +53,11 @@ class ConfigureIndexRequestSpecPod(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} validations = { - ('replicas',): { - 'inclusive_minimum': 1, + ("replicas",): { + "inclusive_minimum": 1, }, } @@ -70,7 +67,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -85,22 +92,20 @@ def openapi_types(): and the value is attribute type. """ return { - 'replicas': (int,), # noqa: E501 - 'pod_type': (str,), # noqa: E501 + "replicas": (int,), # noqa: E501 + "pod_type": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'replicas': 'replicas', # noqa: E501 - 'pod_type': 'pod_type', # noqa: E501 + "replicas": "replicas", # noqa: E501 + "pod_type": "pod_type", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -144,17 +149,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 pod_type (str): The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`.. 
[optional] if omitted the server will use the default value of "p1.x1" # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -170,23 +176,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -227,15 +237,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 pod_type (str): The type of pod to use. One of `s1`, `p1`, or `p2` appended with `.` and one of `x1`, `x2`, `x4`, or `x8`.. [optional] if omitted the server will use the default value of "p1.x1" # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -251,13 +262,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/create_collection_request.py b/pinecone/core/client/model/create_collection_request.py index fdb0759f..93dd4b05 100644 --- a/pinecone/core/client/model/create_collection_request.py +++ b/pinecone/core/client/model/create_collection_request.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class CreateCollectionRequest(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,13 +53,12 @@ class CreateCollectionRequest(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} validations = { - ('name',): { - 'max_length': 45, - 'min_length': 1, + ("name",): { + "max_length": 45, + "min_length": 1, }, } @@ -71,7 +68,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -86,22 +93,20 @@ def openapi_types(): and the value is attribute type. """ return { - 'name': (str,), # noqa: E501 - 'source': (str,), # noqa: E501 + "name": (str,), # noqa: E501 + "source": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'name': 'name', # noqa: E501 - 'source': 'source', # noqa: E501 + "name": "name", # noqa: E501 + "source": "source", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -111,7 +116,7 @@ def _from_openapi_data(cls, name, source, *args, **kwargs): # noqa: E501 """CreateCollectionRequest - a model defined in OpenAPI Args: - name (str): The name of the collection to be created. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. + name (str): The name of the collection to be created. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. source (str): The name of the index to be used as the source for the collection. 
Keyword Args: @@ -147,17 +152,18 @@ def _from_openapi_data(cls, name, source, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -175,30 +181,34 @@ def _from_openapi_data(cls, name, source, *args, **kwargs): # noqa: E501 self.name = name self.source = source for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, name, source, *args, **kwargs): # noqa: E501 """CreateCollectionRequest - a model defined in OpenAPI Args: - name (str): The name of the collection to be created. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. + name (str): The name of the collection to be created. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. source (str): The name of the index to be used as the source for the collection. Keyword Args: @@ -234,15 +244,16 @@ def __init__(self, name, source, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -260,13 +271,17 @@ def __init__(self, name, source, *args, **kwargs): # noqa: E501 self.name = name self.source = source for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/create_index_request.py b/pinecone/core/client/model/create_index_request.py index ef7c56a2..6ce5c4d2 100644 --- a/pinecone/core/client/model/create_index_request.py +++ b/pinecone/core/client/model/create_index_request.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class CreateIndexRequest(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -56,21 +54,21 @@ class CreateIndexRequest(ModelNormal): """ allowed_values = { - ('metric',): { - 'COSINE': "cosine", - 'EUCLIDEAN': "euclidean", - 'DOTPRODUCT': "dotproduct", + ("metric",): { + "COSINE": "cosine", + "EUCLIDEAN": "euclidean", + "DOTPRODUCT": "dotproduct", }, } validations = { - ('name',): { - 'max_length': 45, - 'min_length': 1, + ("name",): { + "max_length": 45, + "min_length": 1, }, - ('dimension',): { - 'inclusive_maximum': 20000, - 'inclusive_minimum': 1, + ("dimension",): { + "inclusive_maximum": 20000, + "inclusive_minimum": 1, }, } @@ -80,7 +78,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -95,26 +103,24 @@ def openapi_types(): and the value is attribute type. """ return { - 'name': (str,), # noqa: E501 - 'dimension': (int,), # noqa: E501 - 'spec': (dict,), # noqa: E501 - 'metric': (str,), # noqa: E501 + "name": (str,), # noqa: E501 + "dimension": (int,), # noqa: E501 + "spec": (dict,), # noqa: E501 + "metric": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'name': 'name', # noqa: E501 - 'dimension': 'dimension', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'metric': 'metric', # noqa: E501 + "name": "name", # noqa: E501 + "dimension": "dimension", # noqa: E501 + "spec": "spec", # noqa: E501 + "metric": "metric", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -124,9 +130,9 @@ def _from_openapi_data(cls, name, dimension, spec, *args, **kwargs): # noqa: E5 """CreateIndexRequest - a model defined in OpenAPI Args: - name (str): The name of the index. 
Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. + name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. dimension (int): The dimensions of the vectors to be inserted in the index. - spec (dict): The spec object defines how the index should be deployed. For serverless indexes, you define only the cloud and region where the index should be hosted. For pod-based indexes, you define the environment where the index should be hosted, the pod type and size to use, and other index characteristics. Serverless indexes are in public preview and are available only on AWS in the us-west-2 region. Test thoroughly before using serverless indexes in production. + spec (dict): The spec object defines how the index should be deployed. For serverless indexes, you define only the cloud and region where the index should be hosted. For pod-based indexes, you define the environment where the index should be hosted, the pod type and size to use, and other index characteristics. Serverless indexes are in public preview and are available only on AWS in the us-west-2 region. Test thoroughly before using serverless indexes in production. Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -162,17 +168,18 @@ def _from_openapi_data(cls, name, dimension, spec, *args, **kwargs): # noqa: E5 metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'.. 
[optional] if omitted the server will use the default value of "cosine" # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -191,32 +198,36 @@ def _from_openapi_data(cls, name, dimension, spec, *args, **kwargs): # noqa: E5 self.dimension = dimension self.spec = spec for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, name, dimension, spec, *args, **kwargs): # noqa: E501 """CreateIndexRequest - a model defined in OpenAPI Args: - name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. + name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. dimension (int): The dimensions of the vectors to be inserted in the index. - spec (dict): The spec object defines how the index should be deployed. For serverless indexes, you define only the cloud and region where the index should be hosted. For pod-based indexes, you define the environment where the index should be hosted, the pod type and size to use, and other index characteristics. Serverless indexes are in public preview and are available only on AWS in the us-west-2 region. Test thoroughly before using serverless indexes in production. + spec (dict): The spec object defines how the index should be deployed. For serverless indexes, you define only the cloud and region where the index should be hosted. For pod-based indexes, you define the environment where the index should be hosted, the pod type and size to use, and other index characteristics. Serverless indexes are in public preview and are available only on AWS in the us-west-2 region. Test thoroughly before using serverless indexes in production. 
Keyword Args: _check_type (bool): if True, values for parameters in openapi_types @@ -252,15 +263,16 @@ def __init__(self, name, dimension, spec, *args, **kwargs): # noqa: E501 metric (str): The distance metric to be used for similarity search. You can use 'euclidean', 'cosine', or 'dotproduct'.. [optional] if omitted the server will use the default value of "cosine" # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -279,13 +291,17 @@ def __init__(self, name, dimension, spec, *args, **kwargs): # noqa: E501 self.dimension = dimension self.spec = spec for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/delete_request.py b/pinecone/core/client/model/delete_request.py index b324c8d4..c1eb3d7a 100644 --- a/pinecone/core/client/model/delete_request.py +++ b/pinecone/core/client/model/delete_request.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class DeleteRequest(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,12 +53,10 @@ class DeleteRequest(ModelNormal): as additional properties values. """ - allowed_values = { - } + allowed_values = {} validations = { - ('ids',): { - }, + ("ids",): {}, } @cached_property @@ -69,7 +65,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -84,26 +90,24 @@ def openapi_types(): and the value is attribute type. 
""" return { - 'ids': ([str],), # noqa: E501 - 'delete_all': (bool,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'filter': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 + "ids": ([str],), # noqa: E501 + "delete_all": (bool,), # noqa: E501 + "namespace": (str,), # noqa: E501 + "filter": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'ids': 'ids', # noqa: E501 - 'delete_all': 'deleteAll', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'filter': 'filter', # noqa: E501 + "ids": "ids", # noqa: E501 + "delete_all": "deleteAll", # noqa: E501 + "namespace": "namespace", # noqa: E501 + "filter": "filter", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -149,17 +153,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See https://www.pinecone.io/docs/metadata-filtering/.. 
[optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -175,23 +180,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -234,15 +243,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): If specified, the metadata filter here will be used to select the vectors to delete. This is mutually exclusive with specifying ids to delete in the ids param or using delete_all=True. See https://www.pinecone.io/docs/metadata-filtering/.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -258,13 +268,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/describe_index_stats_request.py b/pinecone/core/client/model/describe_index_stats_request.py index 84d9fa4e..dffc9965 100644 --- a/pinecone/core/client/model/describe_index_stats_request.py +++ b/pinecone/core/client/model/describe_index_stats_request.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class DescribeIndexStatsRequest(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,11 +53,9 @@ class DescribeIndexStatsRequest(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -67,7 +63,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -82,20 +88,18 @@ def openapi_types(): and the value is attribute type. """ return { - 'filter': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 + "filter": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'filter': 'filter', # noqa: E501 + "filter": "filter", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -138,17 +142,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See https://www.pinecone.io/docs/metadata-filtering/.. 
[optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -164,23 +169,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -220,15 +229,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): If this parameter is present, the operation only returns statistics for vectors that satisfy the filter. See https://www.pinecone.io/docs/metadata-filtering/.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -244,13 +254,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/describe_index_stats_response.py b/pinecone/core/client/model/describe_index_stats_response.py index af0fc6e9..38657628 100644 --- a/pinecone/core/client/model/describe_index_stats_response.py +++ b/pinecone/core/client/model/describe_index_stats_response.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.namespace_summary import NamespaceSummary - globals()['NamespaceSummary'] = NamespaceSummary + + globals()["NamespaceSummary"] = NamespaceSummary class DescribeIndexStatsResponse(ModelNormal): @@ -59,11 +59,9 @@ class DescribeIndexStatsResponse(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -72,7 +70,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -88,26 +96,24 @@ def openapi_types(): """ lazy_import() return { - 'namespaces': ({str: (NamespaceSummary,)},), # noqa: E501 - 'dimension': (int,), # noqa: E501 - 'index_fullness': (float,), # noqa: E501 - 'total_vector_count': (int,), # noqa: E501 + "namespaces": ({str: (NamespaceSummary,)},), # noqa: E501 + "dimension": (int,), # noqa: E501 + "index_fullness": (float,), # noqa: E501 + "total_vector_count": (int,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'namespaces': 'namespaces', # noqa: E501 - 'dimension': 'dimension', # noqa: E501 - 'index_fullness': 'indexFullness', # noqa: E501 - 'total_vector_count': 'totalVectorCount', # noqa: E501 + "namespaces": "namespaces", # noqa: E501 + "dimension": "dimension", # noqa: E501 + "index_fullness": "indexFullness", # noqa: E501 + "total_vector_count": "totalVectorCount", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -153,17 +159,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 total_vector_count (int): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + 
_path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -179,23 +186,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -238,15 +249,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 total_vector_count (int): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -262,13 +274,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/error_response.py b/pinecone/core/client/model/error_response.py index 8955f943..96b8bab0 100644 --- a/pinecone/core/client/model/error_response.py +++ b/pinecone/core/client/model/error_response.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.error_response_error import ErrorResponseError - globals()['ErrorResponseError'] = ErrorResponseError + + globals()["ErrorResponseError"] = ErrorResponseError class ErrorResponse(ModelNormal): @@ -59,11 +59,9 @@ class ErrorResponse(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -72,7 +70,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -88,22 +96,20 @@ def openapi_types(): """ lazy_import() return { - 'status': (int,), # noqa: E501 - 'error': (ErrorResponseError,), # noqa: E501 + "status": (int,), # noqa: E501 + "error": (ErrorResponseError,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'status': 'status', # noqa: E501 - 'error': 'error', # noqa: E501 + "status": "status", # noqa: E501 + "error": "error", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -149,17 +155,18 @@ def _from_openapi_data(cls, status, error, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -177,23 +184,27 @@ def _from_openapi_data(cls, status, error, *args, **kwargs): # noqa: E501 self.status = status self.error = error for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, status, error, *args, **kwargs): # noqa: E501 @@ -236,15 +247,16 @@ def __init__(self, status, error, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -262,13 +274,17 @@ def __init__(self, status, error, *args, **kwargs): # noqa: E501 self.status = status self.error = error for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/error_response_error.py b/pinecone/core/client/model/error_response_error.py index f21c1517..f6830199 100644 --- a/pinecone/core/client/model/error_response_error.py +++ b/pinecone/core/client/model/error_response_error.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class ErrorResponseError(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -56,30 +54,29 @@ class ErrorResponseError(ModelNormal): """ allowed_values = { - ('code',): { - 'OK': "OK", - 'UNKNOWN': "UNKNOWN", - 'INVALID_ARGUMENT': "INVALID_ARGUMENT", - 'DEADLINE_EXCEEDED': "DEADLINE_EXCEEDED", - 'QUOTA_EXCEEDED': "QUOTA_EXCEEDED", - 'NOT_FOUND': "NOT_FOUND", - 'ALREADY_EXISTS': "ALREADY_EXISTS", - 'PERMISSION_DENIED': "PERMISSION_DENIED", - 'UNAUTHENTICATED': "UNAUTHENTICATED", - 'RESOURCE_EXHAUSTED': "RESOURCE_EXHAUSTED", - 'FAILED_PRECONDITION': "FAILED_PRECONDITION", - 'ABORTED': "ABORTED", - 'OUT_OF_RANGE': "OUT_OF_RANGE", - 'UNIMPLEMENTED': "UNIMPLEMENTED", - 'INTERNAL': "INTERNAL", - 'UNAVAILABLE': "UNAVAILABLE", - 'DATA_LOSS': "DATA_LOSS", - 'FORBIDDEN': "FORBIDDEN", + ("code",): { + "OK": "OK", + "UNKNOWN": "UNKNOWN", + "INVALID_ARGUMENT": "INVALID_ARGUMENT", + "DEADLINE_EXCEEDED": "DEADLINE_EXCEEDED", + "QUOTA_EXCEEDED": "QUOTA_EXCEEDED", + "NOT_FOUND": "NOT_FOUND", + "ALREADY_EXISTS": "ALREADY_EXISTS", + "PERMISSION_DENIED": "PERMISSION_DENIED", + "UNAUTHENTICATED": "UNAUTHENTICATED", + "RESOURCE_EXHAUSTED": "RESOURCE_EXHAUSTED", + "FAILED_PRECONDITION": "FAILED_PRECONDITION", + "ABORTED": "ABORTED", + "OUT_OF_RANGE": "OUT_OF_RANGE", + "UNIMPLEMENTED": "UNIMPLEMENTED", + "INTERNAL": "INTERNAL", + "UNAVAILABLE": "UNAVAILABLE", + "DATA_LOSS": "DATA_LOSS", + "FORBIDDEN": "FORBIDDEN", }, } - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -87,7 +84,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -102,24 +109,22 @@ def openapi_types(): and the value is attribute type. 
""" return { - 'code': (str,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'details': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 + "code": (str,), # noqa: E501 + "message": (str,), # noqa: E501 + "details": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'code': 'code', # noqa: E501 - 'message': 'message', # noqa: E501 - 'details': 'details', # noqa: E501 + "code": "code", # noqa: E501 + "message": "message", # noqa: E501 + "details": "details", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -166,17 +171,18 @@ def _from_openapi_data(cls, code, message, *args, **kwargs): # noqa: E501 details ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -194,23 +200,27 @@ def _from_openapi_data(cls, code, message, *args, **kwargs): # noqa: E501 self.code = code self.message = message for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, code, message, *args, **kwargs): # noqa: E501 @@ -254,15 +264,16 @@ def __init__(self, code, message, *args, **kwargs): # noqa: E501 details ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): Additional information about the error. This field is not guaranteed to be present.. 
[optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -280,13 +291,17 @@ def __init__(self, code, message, *args, **kwargs): # noqa: E501 self.code = code self.message = message for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/client/model/fetch_response.py b/pinecone/core/client/model/fetch_response.py index cd96d02b..00cf0d6a 100644 --- a/pinecone/core/client/model/fetch_response.py +++ b/pinecone/core/client/model/fetch_response.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -33,8 +32,9 @@ def lazy_import(): from pinecone.core.client.model.usage import Usage from pinecone.core.client.model.vector import Vector - globals()['Usage'] = Usage - globals()['Vector'] = Vector + + globals()["Usage"] = Usage + globals()["Vector"] = Vector class FetchResponse(ModelNormal): @@ -61,11 +61,9 @@ class FetchResponse(ModelNormal): as additional properties values. """ - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -74,7 +72,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -90,24 +98,22 @@ def openapi_types(): """ lazy_import() return { - 'vectors': ({str: (Vector,)},), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'usage': (Usage,), # noqa: E501 + "vectors": ({str: (Vector,)},), # noqa: E501 + "namespace": (str,), # noqa: E501 + "usage": (Usage,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'vectors': 'vectors', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'usage': 'usage', # noqa: E501 + "vectors": "vectors", # noqa: E501 + "namespace": "namespace", # noqa: E501 + "usage": "usage", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -152,17 +158,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 usage (Usage): [optional] # 
noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -178,23 +185,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -236,15 +247,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 usage (Usage): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -260,13 +272,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/index_list.py b/pinecone/core/client/model/index_list.py index 791972fb..250a1aa8 100644 --- a/pinecone/core/client/model/index_list.py +++ b/pinecone/core/client/model/index_list.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.index_model import IndexModel - globals()['IndexModel'] = IndexModel + + globals()["IndexModel"] = IndexModel class IndexList(ModelNormal): @@ -59,11 +59,9 @@ class IndexList(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -72,7 +70,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -88,20 +96,18 @@ def openapi_types(): """ lazy_import() return { - 'indexes': ([IndexModel],), # noqa: E501 + "indexes": ([IndexModel],), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'indexes': 'indexes', # noqa: E501 + "indexes": "indexes", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -144,17 +150,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 indexes ([IndexModel]): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -170,23 +177,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -226,15 +237,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 indexes ([IndexModel]): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -250,13 +262,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/client/model/index_model.py b/pinecone/core/client/model/index_model.py index 55c5bae1..1a6fdf73 100644 --- a/pinecone/core/client/model/index_model.py +++ b/pinecone/core/client/model/index_model.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -33,8 +32,9 @@ def lazy_import(): from pinecone.core.client.model.index_model_spec import IndexModelSpec from pinecone.core.client.model.index_model_status import IndexModelStatus - globals()['IndexModelSpec'] = IndexModelSpec - globals()['IndexModelStatus'] = IndexModelStatus + + globals()["IndexModelSpec"] = IndexModelSpec + globals()["IndexModelStatus"] = IndexModelStatus class IndexModel(ModelNormal): @@ -62,21 +62,21 @@ class IndexModel(ModelNormal): """ allowed_values = { - ('metric',): { - 'COSINE': "cosine", - 'EUCLIDEAN': "euclidean", - 'DOTPRODUCT': "dotproduct", + ("metric",): { + "COSINE": "cosine", + "EUCLIDEAN": "euclidean", + "DOTPRODUCT": "dotproduct", }, } validations = { - ('name',): { - 'max_length': 45, - 'min_length': 1, + ("name",): { + "max_length": 45, + "min_length": 1, }, - ('dimension',): { - 'inclusive_maximum': 20000, - 'inclusive_minimum': 1, + ("dimension",): { + "inclusive_maximum": 20000, + "inclusive_minimum": 1, }, } @@ -87,7 +87,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -103,30 +113,28 @@ def openapi_types(): """ lazy_import() return { - 'name': (str,), # noqa: E501 - 'dimension': (int,), # noqa: E501 - 'metric': (str,), # noqa: E501 - 'host': (str,), # noqa: E501 - 'spec': (IndexModelSpec,), # noqa: E501 - 'status': (IndexModelStatus,), # noqa: E501 + "name": (str,), # noqa: E501 + "dimension": (int,), # 
noqa: E501 + "metric": (str,), # noqa: E501 + "host": (str,), # noqa: E501 + "spec": (IndexModelSpec,), # noqa: E501 + "status": (IndexModelStatus,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'name': 'name', # noqa: E501 - 'dimension': 'dimension', # noqa: E501 - 'metric': 'metric', # noqa: E501 - 'host': 'host', # noqa: E501 - 'spec': 'spec', # noqa: E501 - 'status': 'status', # noqa: E501 + "name": "name", # noqa: E501 + "dimension": "dimension", # noqa: E501 + "metric": "metric", # noqa: E501 + "host": "host", # noqa: E501 + "spec": "spec", # noqa: E501 + "status": "status", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -136,7 +144,7 @@ def _from_openapi_data(cls, name, dimension, host, spec, status, *args, **kwargs """IndexModel - a model defined in OpenAPI Args: - name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. + name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. dimension (int): The dimensions of the vectors to be inserted in the index. host (str): The URL address where the index is hosted. 
spec (IndexModelSpec): @@ -176,18 +184,19 @@ def _from_openapi_data(cls, name, dimension, host, spec, status, *args, **kwargs _visited_composed_classes = (Animal,) """ - metric = kwargs.get('metric', "cosine") - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + metric = kwargs.get("metric", "cosine") + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -209,30 +218,34 @@ def _from_openapi_data(cls, name, dimension, host, spec, status, *args, **kwargs self.spec = spec self.status = status for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, name, dimension, host, spec, status, *args, **kwargs): # noqa: E501 """IndexModel - a model defined in OpenAPI Args: - name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. + name (str): The name of the index. Resource name must be 1-45 characters long, start and end with an alphanumeric character, and consist only of lower case alphanumeric characters or '-'. dimension (int): The dimensions of the vectors to be inserted in the index. host (str): The URL address where the index is hosted. spec (IndexModelSpec): @@ -272,16 +285,17 @@ def __init__(self, name, dimension, host, spec, status, *args, **kwargs): # noq _visited_composed_classes = (Animal,) """ - metric = kwargs.get('metric', "cosine") - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + metric = kwargs.get("metric", "cosine") + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -303,13 +317,17 @@ def __init__(self, name, dimension, host, spec, status, *args, **kwargs): # noq self.spec = spec self.status = status for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/index_model_spec.py b/pinecone/core/client/model/index_model_spec.py index 3a98bec7..9c8e63f1 100644 --- a/pinecone/core/client/model/index_model_spec.py +++ b/pinecone/core/client/model/index_model_spec.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -33,8 +32,9 @@ def lazy_import(): from pinecone.core.client.model.pod_spec import PodSpec from pinecone.core.client.model.serverless_spec import ServerlessSpec - globals()['PodSpec'] = PodSpec - globals()['ServerlessSpec'] = ServerlessSpec + + globals()["PodSpec"] = PodSpec + globals()["ServerlessSpec"] = ServerlessSpec class IndexModelSpec(ModelNormal): @@ -61,11 +61,9 @@ class IndexModelSpec(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -74,7 +72,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -90,22 +98,20 @@ def openapi_types(): """ lazy_import() return { - 'pod': (PodSpec,), # noqa: E501 - 'serverless': (ServerlessSpec,), # noqa: E501 + "pod": (PodSpec,), # noqa: E501 + "serverless": (ServerlessSpec,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'pod': 'pod', # noqa: E501 - 'serverless': 'serverless', # noqa: E501 + "pod": "pod", # noqa: E501 + "serverless": "serverless", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -149,17 +155,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 serverless (ServerlessSpec): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -175,23 +182,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -232,15 +243,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 serverless (ServerlessSpec): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -256,13 +268,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/index_model_status.py b/pinecone/core/client/model/index_model_status.py index 9bf736fa..ffc25f11 100644 --- a/pinecone/core/client/model/index_model_status.py +++ b/pinecone/core/client/model/index_model_status.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class IndexModelStatus(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -56,20 +54,19 @@ class IndexModelStatus(ModelNormal): """ allowed_values = { - ('state',): { - 'INITIALIZING': "Initializing", - 'INITIALIZATIONFAILED': "InitializationFailed", - 'SCALINGUP': "ScalingUp", - 'SCALINGDOWN': "ScalingDown", - 'SCALINGUPPODSIZE': "ScalingUpPodSize", - 'SCALINGDOWNPODSIZE': "ScalingDownPodSize", - 'TERMINATING': "Terminating", - 'READY': "Ready", + ("state",): { + "INITIALIZING": "Initializing", + "INITIALIZATIONFAILED": "InitializationFailed", + "SCALINGUP": "ScalingUp", + "SCALINGDOWN": "ScalingDown", + "SCALINGUPPODSIZE": "ScalingUpPodSize", + "SCALINGDOWNPODSIZE": "ScalingDownPodSize", + "TERMINATING": "Terminating", + "READY": "Ready", }, } - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -77,7 +74,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -92,22 +99,20 @@ def openapi_types(): and the value is attribute type. 
""" return { - 'ready': (bool,), # noqa: E501 - 'state': (str,), # noqa: E501 + "ready": (bool,), # noqa: E501 + "state": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'ready': 'ready', # noqa: E501 - 'state': 'state', # noqa: E501 + "ready": "ready", # noqa: E501 + "state": "state", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -153,17 +158,18 @@ def _from_openapi_data(cls, ready, state, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -181,23 +187,27 @@ def _from_openapi_data(cls, ready, state, *args, **kwargs): # noqa: E501 self.ready = ready self.state = state for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, ready, state, *args, **kwargs): # noqa: E501 @@ -240,15 +250,16 @@ def __init__(self, ready, state, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -266,13 +277,17 @@ def __init__(self, ready, state, *args, **kwargs): # noqa: E501 self.ready = ready self.state = state for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/list_item.py b/pinecone/core/client/model/list_item.py index 17d0dabc..687c061b 100644 --- a/pinecone/core/client/model/list_item.py +++ b/pinecone/core/client/model/list_item.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class ListItem(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,11 +53,9 @@ class ListItem(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -67,7 +63,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -82,20 +88,18 @@ def openapi_types(): and the value is attribute type. """ return { - 'id': (str,), # noqa: E501 + "id": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'id': 'id', # noqa: E501 + "id": "id", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -138,17 +142,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 id (str): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -164,23 +169,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -220,15 +229,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 id (str): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -244,13 +254,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/list_response.py b/pinecone/core/client/model/list_response.py index fd913b3c..9a2b3065 100644 --- a/pinecone/core/client/model/list_response.py +++ b/pinecone/core/client/model/list_response.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -34,9 +33,10 @@ def lazy_import(): from pinecone.core.client.model.list_item import ListItem from pinecone.core.client.model.pagination import Pagination from pinecone.core.client.model.usage import Usage - globals()['ListItem'] = ListItem - globals()['Pagination'] = Pagination - globals()['Usage'] = Usage + + globals()["ListItem"] = ListItem + globals()["Pagination"] = Pagination + globals()["Usage"] = Usage class ListResponse(ModelNormal): @@ -63,11 +63,9 @@ class ListResponse(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -76,7 +74,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -92,26 +100,24 @@ def openapi_types(): """ lazy_import() return { - 'vectors': ([ListItem],), # noqa: E501 - 'pagination': (Pagination,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'usage': (Usage,), # noqa: E501 + "vectors": ([ListItem],), # noqa: E501 + "pagination": (Pagination,), # noqa: E501 + "namespace": (str,), # noqa: E501 + "usage": (Usage,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'vectors': 'vectors', # noqa: E501 - 'pagination': 'pagination', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'usage': 'usage', # noqa: E501 + "vectors": "vectors", # noqa: E501 + "pagination": "pagination", # noqa: E501 + "namespace": "namespace", # noqa: E501 + "usage": "usage", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -157,17 +163,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 usage (Usage): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = 
kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -183,23 +190,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -242,15 +253,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 usage (Usage): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + 
_configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -266,13 +278,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/namespace_summary.py b/pinecone/core/client/model/namespace_summary.py index c7f86079..9105515e 100644 --- a/pinecone/core/client/model/namespace_summary.py +++ b/pinecone/core/client/model/namespace_summary.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class NamespaceSummary(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -55,11 +53,9 @@ class NamespaceSummary(ModelNormal): as additional properties values. """ - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -67,7 +63,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -82,20 +88,18 @@ def openapi_types(): and the value is attribute type. """ return { - 'vector_count': (int,), # noqa: E501 + "vector_count": (int,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'vector_count': 'vectorCount', # noqa: E501 + "vector_count": "vectorCount", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -138,17 +142,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 vector_count (int): The number of vectors stored in this namespace. Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc.. 
[optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -164,23 +169,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -220,15 +229,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 vector_count (int): The number of vectors stored in this namespace. Note that updates to this field may lag behind updates to the underlying index and corresponding query results, etc.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -244,13 +254,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/pagination.py b/pinecone/core/client/model/pagination.py index ff57a840..5f0836dd 100644 --- a/pinecone/core/client/model/pagination.py +++ b/pinecone/core/client/model/pagination.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class Pagination(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,11 +53,9 @@ class Pagination(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -67,7 +63,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -82,20 +88,18 @@ def openapi_types(): and the value is attribute type. """ return { - 'next': (str,), # noqa: E501 + "next": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'next': 'next', # noqa: E501 + "next": "next", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -138,17 +142,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 next (str): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -164,23 +169,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -220,15 +229,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 next (str): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -244,13 +254,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/pod_spec.py b/pinecone/core/client/model/pod_spec.py index f1b338b0..1f408af9 100644 --- a/pinecone/core/client/model/pod_spec.py +++ b/pinecone/core/client/model/pod_spec.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.pod_spec_metadata_config import PodSpecMetadataConfig - globals()['PodSpecMetadataConfig'] = PodSpecMetadataConfig + + globals()["PodSpecMetadataConfig"] = PodSpecMetadataConfig class PodSpec(ModelNormal): @@ -59,18 +59,17 @@ class PodSpec(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} validations = { - ('replicas',): { - 'inclusive_minimum': 1, + ("replicas",): { + "inclusive_minimum": 1, }, - ('shards',): { - 'inclusive_minimum': 1, + ("shards",): { + "inclusive_minimum": 1, }, - ('pods',): { - 'inclusive_minimum': 1, + ("pods",): { + "inclusive_minimum": 1, }, } @@ -81,7 +80,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -97,32 +106,30 @@ def openapi_types(): """ lazy_import() return { - 'environment': (str,), # noqa: E501 - 'replicas': (int,), # noqa: E501 - 'shards': (int,), # noqa: E501 - 'pod_type': (str,), # noqa: E501 - 'pods': (int,), # noqa: E501 - 'metadata_config': (PodSpecMetadataConfig,), # noqa: E501 - 'source_collection': (str,), # noqa: E501 + "environment": (str,), # noqa: E501 + "replicas": (int,), # noqa: E501 + "shards": (int,), # noqa: E501 + "pod_type": (str,), # noqa: E501 + "pods": (int,), # noqa: E501 + "metadata_config": (PodSpecMetadataConfig,), # noqa: E501 + "source_collection": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'environment': 'environment', # noqa: E501 - 'replicas': 'replicas', # noqa: E501 - 'shards': 'shards', # noqa: E501 - 'pod_type': 'pod_type', # noqa: E501 - 'pods': 'pods', # noqa: E501 - 'metadata_config': 'metadata_config', # noqa: E501 - 'source_collection': 'source_collection', # noqa: E501 + "environment": "environment", # noqa: E501 + "replicas": "replicas", # noqa: E501 + "shards": "shards", # noqa: E501 + "pod_type": "pod_type", # noqa: E501 + "pods": "pods", # noqa: E501 + "metadata_config": "metadata_config", # noqa: E501 + "source_collection": "source_collection", # noqa: E501 } - read_only_vars = { - } + 
read_only_vars = {} _composed_schemas = {} @@ -173,21 +180,22 @@ def _from_openapi_data(cls, environment, *args, **kwargs): # noqa: E501 source_collection (str): The name of the collection to be used as the source for the index.. [optional] # noqa: E501 """ - replicas = kwargs.get('replicas', 1) - shards = kwargs.get('shards', 1) - pod_type = kwargs.get('pod_type', "p1.x1") - pods = kwargs.get('pods', 1) - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + replicas = kwargs.get("replicas", 1) + shards = kwargs.get("shards", 1) + pod_type = kwargs.get("pod_type", "p1.x1") + pods = kwargs.get("pods", 1) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -208,23 +216,27 @@ def _from_openapi_data(cls, environment, *args, **kwargs): # noqa: E501 self.pod_type = pod_type self.pods = pods for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, environment, *args, **kwargs): # noqa: E501 @@ -272,19 +284,20 @@ def __init__(self, environment, *args, **kwargs): # noqa: E501 source_collection (str): The name of the collection to be used as the source for the index.. 
[optional] # noqa: E501 """ - replicas = kwargs.get('replicas', 1) - shards = kwargs.get('shards', 1) - pod_type = kwargs.get('pod_type', "p1.x1") - pods = kwargs.get('pods', 1) - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + replicas = kwargs.get("replicas", 1) + shards = kwargs.get("shards", 1) + pod_type = kwargs.get("pod_type", "p1.x1") + pods = kwargs.get("pods", 1) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -305,13 +318,17 @@ def __init__(self, environment, *args, **kwargs): # noqa: E501 self.pod_type = pod_type self.pods = pods for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/pod_spec_metadata_config.py b/pinecone/core/client/model/pod_spec_metadata_config.py index 2e63659f..1b3f3574 100644 --- a/pinecone/core/client/model/pod_spec_metadata_config.py +++ b/pinecone/core/client/model/pod_spec_metadata_config.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class PodSpecMetadataConfig(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,11 +53,9 @@ class PodSpecMetadataConfig(ModelNormal): as additional properties values. """ - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -67,7 +63,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -82,20 +88,18 @@ def openapi_types(): and the value is attribute type. 
""" return { - 'indexed': ([str],), # noqa: E501 + "indexed": ([str],), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'indexed': 'indexed', # noqa: E501 + "indexed": "indexed", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -138,17 +142,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 indexed ([str]): By default, all metadata is indexed; to change this behavior, use this property to specify an array of metadata fields that should be indexed.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -164,23 +169,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -220,15 +229,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 indexed ([str]): By default, all metadata is indexed; to change this behavior, use this property to specify an array of metadata fields that should be indexed.. 
[optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -244,13 +254,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/client/model/protobuf_any.py b/pinecone/core/client/model/protobuf_any.py index e35f81eb..3a0c4042 100644 --- a/pinecone/core/client/model/protobuf_any.py +++ b/pinecone/core/client/model/protobuf_any.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class ProtobufAny(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,11 +53,9 @@ class ProtobufAny(ModelNormal): as additional properties values. """ - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -67,7 +63,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -82,22 +88,20 @@ def openapi_types(): and the value is attribute type. 
""" return { - 'type_url': (str,), # noqa: E501 - 'value': (str,), # noqa: E501 + "type_url": (str,), # noqa: E501 + "value": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'type_url': 'typeUrl', # noqa: E501 - 'value': 'value', # noqa: E501 + "type_url": "typeUrl", # noqa: E501 + "value": "value", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -141,17 +145,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 value (str): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -167,23 +172,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -224,15 +233,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 value (str): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -248,13 +258,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/protobuf_null_value.py b/pinecone/core/client/model/protobuf_null_value.py index 7b5b0f1d..8edc2ac4 100644 --- a/pinecone/core/client/model/protobuf_null_value.py +++ b/pinecone/core/client/model/protobuf_null_value.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class ProtobufNullValue(ModelSimple): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -52,13 +50,12 @@ class ProtobufNullValue(ModelSimple): """ allowed_values = { - ('value',): { - 'NULL_VALUE': "NULL_VALUE", + ("value",): { + "NULL_VALUE": "NULL_VALUE", }, } - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -66,7 +63,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -81,28 +88,29 @@ def openapi_types(): and the value is attribute type. """ return { - 'value': (str,), + "value": (str,), } @cached_property def discriminator(): return None - attribute_map = {} read_only_vars = set() _composed_schemas = None - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): @@ -147,24 +155,25 @@ def __init__(self, *args, **kwargs): _visited_composed_classes = (Animal,) """ # required up here when default value is not given - _path_to_item = kwargs.pop('_path_to_item', ()) + _path_to_item = kwargs.pop("_path_to_item", ()) - if 'value' in kwargs: - value = kwargs.pop('value') + if "value" in kwargs: + value = kwargs.pop("value") elif args: args = list(args) value = args.pop(0) else: value = "NULL_VALUE" - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = 
kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -181,7 +190,8 @@ def __init__(self, *args, **kwargs): self.value = value if kwargs: raise PineconeApiTypeError( - "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % ( + "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." + % ( kwargs, self.__class__.__name__, ), @@ -233,26 +243,27 @@ def _from_openapi_data(cls, *args, **kwargs): _visited_composed_classes = (Animal,) """ # required up here when default value is not given - _path_to_item = kwargs.pop('_path_to_item', ()) + _path_to_item = kwargs.pop("_path_to_item", ()) self = super(OpenApiModel, cls).__new__(cls) - if 'value' in kwargs: - value = kwargs.pop('value') + if "value" in kwargs: + value = kwargs.pop("value") elif args: args = list(args) value = args.pop(0) else: value = "NULL_VALUE" - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -269,7 +280,8 @@ def _from_openapi_data(cls, *args, **kwargs): self.value = value if kwargs: raise PineconeApiTypeError( - "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % ( + "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." + % ( kwargs, self.__class__.__name__, ), diff --git a/pinecone/core/client/model/query_request.py b/pinecone/core/client/model/query_request.py index 1e2c6466..8f410dbc 100644 --- a/pinecone/core/client/model/query_request.py +++ b/pinecone/core/client/model/query_request.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -33,8 +32,9 @@ def lazy_import(): from pinecone.core.client.model.query_vector import QueryVector from pinecone.core.client.model.sparse_values import SparseValues - globals()['QueryVector'] = QueryVector - globals()['SparseValues'] = SparseValues + + globals()["QueryVector"] = QueryVector + globals()["SparseValues"] = SparseValues class QueryRequest(ModelNormal): @@ -61,20 +61,17 @@ class QueryRequest(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} validations = { - ('top_k',): { - 'inclusive_maximum': 10000, - 'inclusive_minimum': 1, - }, - ('queries',): { + ("top_k",): { + "inclusive_maximum": 10000, + "inclusive_minimum": 1, }, - ('vector',): { - }, - ('id',): { - 'max_length': 512, + ("queries",): {}, + ("vector",): {}, + ("id",): { + "max_length": 512, }, } @@ -85,7 +82,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -101,36 +108,34 @@ def openapi_types(): """ lazy_import() return { - 'top_k': (int,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'filter': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - 'include_values': (bool,), # noqa: E501 - 'include_metadata': (bool,), # noqa: E501 - 'queries': ([QueryVector],), # noqa: E501 - 'vector': ([float],), # noqa: E501 - 'sparse_vector': (SparseValues,), # noqa: E501 - 'id': (str,), # noqa: E501 + "top_k": (int,), # noqa: E501 + "namespace": (str,), # noqa: E501 + "filter": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 + "include_values": (bool,), # noqa: E501 + "include_metadata": (bool,), # noqa: E501 + "queries": ([QueryVector],), # noqa: E501 + "vector": ([float],), # noqa: E501 + "sparse_vector": (SparseValues,), # noqa: E501 + "id": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'top_k': 'topK', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'filter': 'filter', # noqa: E501 - 'include_values': 'includeValues', # noqa: E501 - 'include_metadata': 'includeMetadata', # noqa: E501 - 'queries': 'queries', # noqa: E501 - 'vector': 'vector', # noqa: E501 - 'sparse_vector': 'sparseVector', # 
noqa: E501 - 'id': 'id', # noqa: E501 + "top_k": "topK", # noqa: E501 + "namespace": "namespace", # noqa: E501 + "filter": "filter", # noqa: E501 + "include_values": "includeValues", # noqa: E501 + "include_metadata": "includeMetadata", # noqa: E501 + "queries": "queries", # noqa: E501 + "vector": "vector", # noqa: E501 + "sparse_vector": "sparseVector", # noqa: E501 + "id": "id", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -183,17 +188,18 @@ def _from_openapi_data(cls, top_k, *args, **kwargs): # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -210,23 +216,27 @@ def _from_openapi_data(cls, top_k, *args, **kwargs): # noqa: E501 self.top_k = top_k for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, top_k, *args, **kwargs): # noqa: E501 @@ -276,15 +286,16 @@ def __init__(self, top_k, *args, **kwargs): # noqa: E501 id (str): The unique ID of the vector to be used as a query vector. Each `query()` request can contain only one of the parameters `queries`, `vector`, or `id`.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -301,13 +312,17 @@ def __init__(self, top_k, *args, **kwargs): # noqa: E501 self.top_k = top_k for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." 
+ ) diff --git a/pinecone/core/client/model/query_response.py b/pinecone/core/client/model/query_response.py index 58a29ce3..06c9d0c1 100644 --- a/pinecone/core/client/model/query_response.py +++ b/pinecone/core/client/model/query_response.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -34,9 +33,10 @@ def lazy_import(): from pinecone.core.client.model.scored_vector import ScoredVector from pinecone.core.client.model.single_query_results import SingleQueryResults from pinecone.core.client.model.usage import Usage - globals()['ScoredVector'] = ScoredVector - globals()['SingleQueryResults'] = SingleQueryResults - globals()['Usage'] = Usage + + globals()["ScoredVector"] = ScoredVector + globals()["SingleQueryResults"] = SingleQueryResults + globals()["Usage"] = Usage class QueryResponse(ModelNormal): @@ -63,11 +63,9 @@ class QueryResponse(ModelNormal): as additional properties values. """ - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -76,7 +74,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -92,26 +100,24 @@ def openapi_types(): """ lazy_import() return { - 'results': ([SingleQueryResults],), # noqa: E501 - 'matches': ([ScoredVector],), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'usage': (Usage,), # noqa: E501 + "results": ([SingleQueryResults],), # noqa: E501 + "matches": ([ScoredVector],), # noqa: E501 + "namespace": (str,), # noqa: E501 + "usage": (Usage,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'results': 'results', # noqa: E501 - 'matches': 'matches', # noqa: E501 - 
'namespace': 'namespace', # noqa: E501 - 'usage': 'usage', # noqa: E501 + "results": "results", # noqa: E501 + "matches": "matches", # noqa: E501 + "namespace": "namespace", # noqa: E501 + "usage": "usage", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -157,17 +163,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 usage (Usage): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -183,23 +190,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -242,15 +253,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 usage (Usage): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -266,13 +278,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/query_vector.py b/pinecone/core/client/model/query_vector.py index eed194e6..f7ca7337 100644 --- a/pinecone/core/client/model/query_vector.py +++ b/pinecone/core/client/model/query_vector.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.sparse_values import SparseValues - globals()['SparseValues'] = SparseValues + + globals()["SparseValues"] = SparseValues class QueryVector(ModelNormal): @@ -59,15 +59,13 @@ class QueryVector(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} validations = { - ('values',): { - }, - ('top_k',): { - 'inclusive_maximum': 10000, - 'inclusive_minimum': 1, + ("values",): {}, + ("top_k",): { + "inclusive_maximum": 10000, + "inclusive_minimum": 1, }, } @@ -78,7 +76,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -94,28 +102,26 @@ def openapi_types(): """ lazy_import() return { - 'values': ([float],), # noqa: E501 - 'sparse_values': (SparseValues,), # noqa: E501 - 'top_k': (int,), # noqa: E501 - 'namespace': (str,), # noqa: E501 - 'filter': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 + "values": ([float],), # noqa: E501 + "sparse_values": (SparseValues,), # noqa: E501 + "top_k": (int,), # noqa: E501 + "namespace": (str,), # noqa: E501 + "filter": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'values': 'values', # noqa: E501 - 'sparse_values': 'sparseValues', # noqa: E501 - 'top_k': 'topK', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 - 'filter': 'filter', # noqa: E501 + "values": "values", # noqa: E501 + "sparse_values": "sparseValues", # noqa: E501 + "top_k": "topK", # noqa: E501 + "namespace": "namespace", # noqa: E501 + "filter": "filter", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -164,17 +170,18 @@ def _from_openapi_data(cls, values, *args, **kwargs): # noqa: E501 filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter.. 
[optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -191,23 +198,27 @@ def _from_openapi_data(cls, values, *args, **kwargs): # noqa: E501 self.values = values for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, values, *args, **kwargs): # noqa: E501 @@ -253,15 +264,16 @@ def __init__(self, values, *args, **kwargs): # noqa: E501 filter ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): An override for the metadata filter to apply. This replaces the request-level filter.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -278,13 +290,17 @@ def __init__(self, values, *args, **kwargs): # noqa: E501 self.values = values for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/rpc_status.py b/pinecone/core/client/model/rpc_status.py index 10f0b445..a2f422bf 100644 --- a/pinecone/core/client/model/rpc_status.py +++ b/pinecone/core/client/model/rpc_status.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.protobuf_any import ProtobufAny - globals()['ProtobufAny'] = ProtobufAny + + globals()["ProtobufAny"] = ProtobufAny class RpcStatus(ModelNormal): @@ -59,11 +59,9 @@ class RpcStatus(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -72,7 +70,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -88,24 +96,22 @@ def openapi_types(): """ lazy_import() return { - 'code': (int,), # noqa: E501 - 'message': (str,), # noqa: E501 - 'details': ([ProtobufAny],), # noqa: E501 + "code": (int,), # noqa: E501 + "message": (str,), # noqa: E501 + "details": ([ProtobufAny],), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'code': 'code', # noqa: E501 - 'message': 'message', # noqa: E501 - 'details': 'details', # noqa: E501 + "code": "code", # noqa: E501 + "message": "message", # noqa: E501 + "details": "details", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -150,17 +156,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 details ([ProtobufAny]): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -176,23 +183,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -234,15 +245,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 details ([ProtobufAny]): [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - 
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -258,13 +270,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/scored_vector.py b/pinecone/core/client/model/scored_vector.py index 9f92e1a4..60cad1ce 100644 --- a/pinecone/core/client/model/scored_vector.py +++ b/pinecone/core/client/model/scored_vector.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.sparse_values import SparseValues - globals()['SparseValues'] = SparseValues + + globals()["SparseValues"] = SparseValues class ScoredVector(ModelNormal): @@ -59,13 +59,12 @@ class ScoredVector(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} validations = { - ('id',): { - 'max_length': 512, - 'min_length': 1, + ("id",): { + "max_length": 512, + "min_length": 1, }, } @@ -76,7 +75,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -92,28 +101,26 @@ def openapi_types(): """ lazy_import() return { - 'id': (str,), # noqa: E501 - 'score': (float,), # noqa: E501 - 'values': ([float],), # noqa: E501 - 'sparse_values': (SparseValues,), # noqa: E501 - 'metadata': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 + "id": (str,), # noqa: E501 + "score": (float,), # noqa: E501 + "values": ([float],), # noqa: E501 + "sparse_values": (SparseValues,), # noqa: E501 + "metadata": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'id': 'id', # noqa: E501 - 'score': 'score', # noqa: E501 - 'values': 'values', # noqa: E501 - 'sparse_values': 'sparseValues', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 + "id": "id", # noqa: E501 + "score": "score", # noqa: E501 + "values": "values", # noqa: E501 + "sparse_values": "sparseValues", # noqa: E501 + "metadata": "metadata", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -162,17 +169,18 @@ def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501 metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested.. 
[optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -189,23 +197,27 @@ def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501 self.id = id for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, id, *args, **kwargs): # noqa: E501 @@ -251,15 +263,16 @@ def __init__(self, id, *args, **kwargs): # noqa: E501 metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata, if it is requested.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -276,13 +289,17 @@ def __init__(self, id, *args, **kwargs): # noqa: E501 self.id = id for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/serverless_spec.py b/pinecone/core/client/model/serverless_spec.py index 15137c60..1415c018 100644 --- a/pinecone/core/client/model/serverless_spec.py +++ b/pinecone/core/client/model/serverless_spec.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class ServerlessSpec(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. 
Ref: https://openapi-generator.tech @@ -56,15 +54,14 @@ class ServerlessSpec(ModelNormal): """ allowed_values = { - ('cloud',): { - 'GCP': "gcp", - 'AWS': "aws", - 'AZURE': "azure", + ("cloud",): { + "GCP": "gcp", + "AWS": "aws", + "AZURE": "azure", }, } - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -72,7 +69,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -87,22 +94,20 @@ def openapi_types(): and the value is attribute type. """ return { - 'cloud': (str,), # noqa: E501 - 'region': (str,), # noqa: E501 + "cloud": (str,), # noqa: E501 + "region": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'cloud': 'cloud', # noqa: E501 - 'region': 'region', # noqa: E501 + "cloud": "cloud", # noqa: E501 + "region": "region", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -148,17 +153,18 @@ def _from_openapi_data(cls, cloud, region, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, 
cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -176,23 +182,27 @@ def _from_openapi_data(cls, cloud, region, *args, **kwargs): # noqa: E501 self.cloud = cloud self.region = region for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, cloud, region, *args, **kwargs): # noqa: E501 @@ -235,15 +245,16 @@ def __init__(self, cloud, region, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + 
_visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -261,13 +272,17 @@ def __init__(self, cloud, region, *args, **kwargs): # noqa: E501 self.cloud = cloud self.region = region for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/single_query_results.py b/pinecone/core/client/model/single_query_results.py index b84b8ba8..88776062 100644 --- a/pinecone/core/client/model/single_query_results.py +++ b/pinecone/core/client/model/single_query_results.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.scored_vector import ScoredVector - globals()['ScoredVector'] = ScoredVector + + globals()["ScoredVector"] = ScoredVector class SingleQueryResults(ModelNormal): @@ -59,11 +59,9 @@ class SingleQueryResults(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -72,7 +70,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -88,22 +96,20 @@ def openapi_types(): """ lazy_import() return { - 'matches': ([ScoredVector],), # noqa: E501 - 'namespace': (str,), # noqa: E501 + "matches": ([ScoredVector],), # noqa: E501 + "namespace": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'matches': 'matches', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 + "matches": "matches", # noqa: E501 + "namespace": "namespace", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -147,17 +153,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 namespace (str): The namespace for the vectors.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -173,23 +180,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -230,15 +241,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 namespace (str): The namespace for the vectors.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -254,13 +266,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/sparse_values.py b/pinecone/core/client/model/sparse_values.py index a001217b..b9650352 100644 --- a/pinecone/core/client/model/sparse_values.py +++ b/pinecone/core/client/model/sparse_values.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class SparseValues(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,14 +53,11 @@ class SparseValues(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} validations = { - ('indices',): { - }, - ('values',): { - }, + ("indices",): {}, + ("values",): {}, } @cached_property @@ -71,7 +66,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -86,22 +91,20 @@ def openapi_types(): and the value is attribute type. """ return { - 'indices': ([int],), # noqa: E501 - 'values': ([float],), # noqa: E501 + "indices": ([int],), # noqa: E501 + "values": ([float],), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'indices': 'indices', # noqa: E501 - 'values': 'values', # noqa: E501 + "indices": "indices", # noqa: E501 + "values": "values", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -147,17 +150,18 @@ def _from_openapi_data(cls, indices, values, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -175,23 +179,27 @@ def _from_openapi_data(cls, indices, values, *args, **kwargs): # noqa: E501 self.indices = indices self.values = values for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, indices, values, *args, **kwargs): # noqa: E501 @@ -234,15 +242,16 @@ def __init__(self, indices, values, *args, **kwargs): # noqa: E501 _visited_composed_classes = (Animal,) """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -260,13 +269,17 @@ def __init__(self, indices, values, *args, **kwargs): # noqa: E501 self.indices = indices self.values = values for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/update_request.py b/pinecone/core/client/model/update_request.py index fc94d031..da569e57 100644 --- a/pinecone/core/client/model/update_request.py +++ b/pinecone/core/client/model/update_request.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.sparse_values import SparseValues - globals()['SparseValues'] = SparseValues + + globals()["SparseValues"] = SparseValues class UpdateRequest(ModelNormal): @@ -59,16 +59,14 @@ class UpdateRequest(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} validations = { - ('id',): { - 'max_length': 512, - 'min_length': 1, - }, - ('values',): { + ("id",): { + "max_length": 512, + "min_length": 1, }, + ("values",): {}, } @cached_property @@ -78,7 +76,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -94,28 +102,26 @@ def openapi_types(): """ lazy_import() return { - 'id': (str,), # noqa: E501 - 'values': ([float],), # noqa: E501 - 'sparse_values': (SparseValues,), # noqa: E501 - 'set_metadata': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 - 'namespace': (str,), # noqa: E501 + "id": (str,), # noqa: E501 + "values": ([float],), # noqa: E501 + "sparse_values": (SparseValues,), # noqa: E501 + "set_metadata": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 + "namespace": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'id': 'id', # noqa: E501 - 'values': 'values', # noqa: E501 - 'sparse_values': 'sparseValues', # noqa: E501 - 'set_metadata': 'setMetadata', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 + "id": "id", # noqa: E501 + "values": "values", # noqa: E501 + "sparse_values": "sparseValues", # noqa: E501 + "set_metadata": "setMetadata", # noqa: E501 + "namespace": "namespace", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -164,17 +170,18 @@ def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501 namespace (str): Namespace name where to update the vector.. 
[optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -191,23 +198,27 @@ def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501 self.id = id for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, id, *args, **kwargs): # noqa: E501 @@ -253,15 +264,16 @@ def __init__(self, id, *args, **kwargs): # noqa: E501 namespace (str): Namespace name where to update the vector.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -278,13 +290,17 @@ def __init__(self, id, *args, **kwargs): # noqa: E501 self.id = id for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/upsert_request.py b/pinecone/core/client/model/upsert_request.py index 980f734c..72ae3cbd 100644 --- a/pinecone/core/client/model/upsert_request.py +++ b/pinecone/core/client/model/upsert_request.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.vector import Vector - globals()['Vector'] = Vector + + globals()["Vector"] = Vector class UpsertRequest(ModelNormal): @@ -59,12 +59,10 @@ class UpsertRequest(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} validations = { - ('vectors',): { - }, + ("vectors",): {}, } @cached_property @@ -74,7 +72,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -90,22 +98,20 @@ def openapi_types(): """ lazy_import() return { - 'vectors': ([Vector],), # noqa: E501 - 'namespace': (str,), # noqa: E501 + "vectors": ([Vector],), # noqa: E501 + "namespace": (str,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'vectors': 'vectors', # noqa: E501 - 'namespace': 'namespace', # noqa: E501 + "vectors": "vectors", # noqa: E501 + "namespace": "namespace", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -151,17 +157,18 @@ def _from_openapi_data(cls, vectors, *args, **kwargs): # noqa: E501 namespace (str): This is the namespace name where you upsert vectors.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -178,23 +185,27 @@ def _from_openapi_data(cls, vectors, *args, **kwargs): # noqa: E501 self.vectors = vectors for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, vectors, *args, **kwargs): # noqa: E501 @@ -237,15 +248,16 @@ def __init__(self, vectors, *args, **kwargs): # noqa: E501 namespace (str): This is the namespace name where you upsert vectors.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. 
Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -262,13 +274,17 @@ def __init__(self, vectors, *args, **kwargs): # noqa: E501 self.vectors = vectors for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/upsert_response.py b/pinecone/core/client/model/upsert_response.py index 611ffb13..5e300828 100644 --- a/pinecone/core/client/model/upsert_response.py +++ b/pinecone/core/client/model/upsert_response.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class UpsertResponse(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,11 +53,9 @@ class UpsertResponse(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -67,7 +63,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -82,20 +88,18 @@ def openapi_types(): and the value is attribute type. """ return { - 'upserted_count': (int,), # noqa: E501 + "upserted_count": (int,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'upserted_count': 'upsertedCount', # noqa: E501 + "upserted_count": "upsertedCount", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -138,17 +142,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 upserted_count (int): The number of vectors upserted.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -164,23 +169,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -220,15 +229,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 upserted_count (int): The number of vectors upserted.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -244,13 +254,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/usage.py b/pinecone/core/client/model/usage.py index 9ba76546..45b4eca2 100644 --- a/pinecone/core/client/model/usage.py +++ b/pinecone/core/client/model/usage.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -30,7 +29,6 @@ from pinecone.core.client.exceptions import PineconeApiAttributeError - class Usage(ModelNormal): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech @@ -55,11 +53,9 @@ class Usage(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} - validations = { - } + validations = {} @cached_property def additional_properties_type(): @@ -67,7 +63,17 @@ def additional_properties_type(): This must be a method because a model may have properties that are of type self, this must run after the class is loaded """ - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -82,20 +88,18 @@ def openapi_types(): and the value is attribute type. """ return { - 'read_units': (int,), # noqa: E501 + "read_units": (int,), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'read_units': 'readUnits', # noqa: E501 + "read_units": "readUnits", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -138,17 +142,18 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 read_units (int): The number of read units consumed by this operation.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -164,23 +169,27 @@ def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 @@ -220,15 +229,16 @@ def __init__(self, *args, **kwargs): # noqa: E501 read_units (int): The number of read units consumed by this operation.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -244,13 +254,17 @@ def __init__(self, *args, **kwargs): # noqa: E501 self._visited_composed_classes = _visited_composed_classes + (self.__class__,) for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model/vector.py b/pinecone/core/client/model/vector.py index 63fed80e..f7f79919 100644 --- a/pinecone/core/client/model/vector.py +++ b/pinecone/core/client/model/vector.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import re # noqa: F401 import sys # noqa: F401 @@ -32,7 +31,8 @@ def lazy_import(): from pinecone.core.client.model.sparse_values import SparseValues - globals()['SparseValues'] = SparseValues + + globals()["SparseValues"] = SparseValues class Vector(ModelNormal): @@ -59,16 +59,14 @@ class Vector(ModelNormal): as additional properties values. 
""" - allowed_values = { - } + allowed_values = {} validations = { - ('id',): { - 'max_length': 512, - 'min_length': 1, - }, - ('values',): { + ("id",): { + "max_length": 512, + "min_length": 1, }, + ("values",): {}, } @cached_property @@ -78,7 +76,17 @@ def additional_properties_type(): of type self, this must run after the class is loaded """ lazy_import() - return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + return ( + bool, + date, + datetime, + dict, + float, + int, + list, + str, + none_type, + ) # noqa: E501 _nullable = False @@ -94,26 +102,24 @@ def openapi_types(): """ lazy_import() return { - 'id': (str,), # noqa: E501 - 'values': ([float],), # noqa: E501 - 'sparse_values': (SparseValues,), # noqa: E501 - 'metadata': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 + "id": (str,), # noqa: E501 + "values": ([float],), # noqa: E501 + "sparse_values": (SparseValues,), # noqa: E501 + "metadata": ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501 } @cached_property def discriminator(): return None - attribute_map = { - 'id': 'id', # noqa: E501 - 'values': 'values', # noqa: E501 - 'sparse_values': 'sparseValues', # noqa: E501 - 'metadata': 'metadata', # noqa: E501 + "id": "id", # noqa: E501 + "values": "values", # noqa: E501 + "sparse_values": "sparseValues", # noqa: E501 + "metadata": "metadata", # noqa: E501 } - read_only_vars = { - } + read_only_vars = {} _composed_schemas = {} @@ -161,17 +167,18 @@ def _from_openapi_data(cls, id, values, *args, **kwargs): # noqa: E501 metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata included in the request.. 
[optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) self = super(OpenApiModel, cls).__new__(cls) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." + % ( args, self.__class__.__name__, ), @@ -189,23 +196,27 @@ def _from_openapi_data(cls, id, values, *args, **kwargs): # noqa: E501 self.id = id self.values = values for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. 
continue setattr(self, var_name, var_value) return self - required_properties = set([ - '_data_store', - '_check_type', - '_spec_property_naming', - '_path_to_item', - '_configuration', - '_visited_composed_classes', - ]) + required_properties = set( + [ + "_data_store", + "_check_type", + "_spec_property_naming", + "_path_to_item", + "_configuration", + "_visited_composed_classes", + ] + ) @convert_js_args_to_python_args def __init__(self, id, values, *args, **kwargs): # noqa: E501 @@ -250,15 +261,16 @@ def __init__(self, id, values, *args, **kwargs): # noqa: E501 metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): This is the metadata included in the request.. [optional] # noqa: E501 """ - _check_type = kwargs.pop('_check_type', True) - _spec_property_naming = kwargs.pop('_spec_property_naming', False) - _path_to_item = kwargs.pop('_path_to_item', ()) - _configuration = kwargs.pop('_configuration', None) - _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + _check_type = kwargs.pop("_check_type", True) + _spec_property_naming = kwargs.pop("_spec_property_naming", False) + _path_to_item = kwargs.pop("_path_to_item", ()) + _configuration = kwargs.pop("_configuration", None) + _visited_composed_classes = kwargs.pop("_visited_composed_classes", ()) if args: raise PineconeApiTypeError( - "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
+ % ( args, self.__class__.__name__, ), @@ -276,13 +288,17 @@ def __init__(self, id, values, *args, **kwargs): # noqa: E501 self.id = id self.values = values for var_name, var_value in kwargs.items(): - if var_name not in self.attribute_map and \ - self._configuration is not None and \ - self._configuration.discard_unknown_keys and \ - self.additional_properties_type is None: + if ( + var_name not in self.attribute_map + and self._configuration is not None + and self._configuration.discard_unknown_keys + and self.additional_properties_type is None + ): # discard variable. continue setattr(self, var_name, var_value) if var_name in self.read_only_vars: - raise PineconeApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " - f"class with read only attributes.") + raise PineconeApiAttributeError( + f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate " + f"class with read only attributes." + ) diff --git a/pinecone/core/client/model_utils.py b/pinecone/core/client/model_utils.py index a3572fbb..6874920a 100644 --- a/pinecone/core/client/model_utils.py +++ b/pinecone/core/client/model_utils.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - from datetime import date, datetime # noqa: F401 import inspect import io @@ -30,6 +29,7 @@ def convert_js_args_to_python_args(fn): from functools import wraps + @wraps(fn) def wrapped_init(_self, *args, **kwargs): """ @@ -37,10 +37,11 @@ def wrapped_init(_self, *args, **kwargs): parameter of a class method. During generation, `self` attributes are mapped to `_self` in models. Here, we name `_self` instead of `self` to avoid conflicts. 
""" - spec_property_naming = kwargs.get('_spec_property_naming', False) + spec_property_naming = kwargs.get("_spec_property_naming", False) if spec_property_naming: kwargs = change_keys_js_to_python(kwargs, _self if isinstance(_self, type) else _self.__class__) return fn(_self, *args, **kwargs) + return wrapped_init @@ -48,7 +49,7 @@ class cached_property(object): # this caches the result of the function call for fn with no inputs # use this as a decorator on function methods that you want converted # into cached properties - result_key = '_results' + result_key = "_results" def __init__(self, fn): self._fn = fn @@ -64,6 +65,7 @@ def __get__(self, instance, cls=None): PRIMITIVE_TYPES = (list, float, int, bool, str, file_type) + def allows_single_value_input(cls): """ This function returns True if the input composed schema model or any @@ -77,17 +79,15 @@ def allows_single_value_input(cls): - null TODO: lru_cache this """ - if ( - issubclass(cls, ModelSimple) or - cls in PRIMITIVE_TYPES - ): + if issubclass(cls, ModelSimple) or cls in PRIMITIVE_TYPES: return True elif issubclass(cls, ModelComposed): - if not cls._composed_schemas['oneOf']: + if not cls._composed_schemas["oneOf"]: return False - return any(allows_single_value_input(c) for c in cls._composed_schemas['oneOf']) + return any(allows_single_value_input(c) for c in cls._composed_schemas["oneOf"]) return False + def composed_model_input_classes(cls): """ This function returns a list of the possible models that can be accepted as @@ -102,11 +102,11 @@ def composed_model_input_classes(cls): else: return get_discriminated_classes(cls) elif issubclass(cls, ModelComposed): - if not cls._composed_schemas['oneOf']: + if not cls._composed_schemas["oneOf"]: return [] if cls.discriminator is None: input_classes = [] - for c in cls._composed_schemas['oneOf']: + for c in cls._composed_schemas["oneOf"]: input_classes.extend(composed_model_input_classes(c)) return input_classes else: @@ -129,45 +129,29 @@ def 
set_attribute(self, name, value): required_types_mixed = self.openapi_types[name] elif self.additional_properties_type is None: raise PineconeApiAttributeError( - "{0} has no attribute '{1}'".format( - type(self).__name__, name), - path_to_item + "{0} has no attribute '{1}'".format(type(self).__name__, name), path_to_item ) elif self.additional_properties_type is not None: required_types_mixed = self.additional_properties_type if get_simple_class(name) != str: - error_msg = type_error_message( - var_name=name, - var_value=name, - valid_classes=(str,), - key_type=True - ) - raise PineconeApiTypeError( - error_msg, - path_to_item=path_to_item, - valid_classes=(str,), - key_type=True - ) + error_msg = type_error_message(var_name=name, var_value=name, valid_classes=(str,), key_type=True) + raise PineconeApiTypeError(error_msg, path_to_item=path_to_item, valid_classes=(str,), key_type=True) if self._check_type: value = validate_and_convert_types( - value, required_types_mixed, path_to_item, self._spec_property_naming, - self._check_type, configuration=self._configuration) - if (name,) in self.allowed_values: - check_allowed_values( - self.allowed_values, - (name,), - value - ) - if (name,) in self.validations: - check_validations( - self.validations, - (name,), value, - self._configuration + value, + required_types_mixed, + path_to_item, + self._spec_property_naming, + self._check_type, + configuration=self._configuration, ) - self.__dict__['_data_store'][name] = value + if (name,) in self.allowed_values: + check_allowed_values(self.allowed_values, (name,), value) + if (name,) in self.validations: + check_validations(self.validations, (name,), value, self._configuration) + self.__dict__["_data_store"][name] = value def __repr__(self): """For `print` and `pprint`""" @@ -201,12 +185,8 @@ def __new__(cls, *args, **kwargs): oneof_instance = get_oneof_instance(cls, model_kwargs, kwargs, model_arg=arg) return oneof_instance - - visited_composed_classes =
kwargs.get('_visited_composed_classes', ()) - if ( - cls.discriminator is None or - cls in visited_composed_classes - ): + visited_composed_classes = kwargs.get("_visited_composed_classes", ()) + if cls.discriminator is None or cls in visited_composed_classes: # Use case 1: this openapi schema (cls) does not have a discriminator # Use case 2: we have already visited this class before and are sure that we # want to instantiate it this time. We have visited this class deserializing @@ -236,28 +216,24 @@ def __new__(cls, *args, **kwargs): discr_value = kwargs[discr_propertyname_py] else: # The input data does not contain the discriminator property. - path_to_item = kwargs.get('_path_to_item', ()) + path_to_item = kwargs.get("_path_to_item", ()) raise PineconeApiValueError( "Cannot deserialize input data due to missing discriminator. " - "The discriminator property '%s' is missing at path: %s" % - (discr_propertyname_js, path_to_item) + "The discriminator property '%s' is missing at path: %s" % (discr_propertyname_js, path_to_item) ) # Implementation note: the last argument to get_discriminator_class # is a list of visited classes. get_discriminator_class may recursively # call itself and update the list of visited classes, and the initial # value must be an empty list. Hence not using 'visited_composed_classes' - new_cls = get_discriminator_class( - cls, discr_propertyname_py, discr_value, []) + new_cls = get_discriminator_class(cls, discr_propertyname_py, discr_value, []) if new_cls is None: - path_to_item = kwargs.get('_path_to_item', ()) - disc_prop_value = kwargs.get( - discr_propertyname_js, kwargs.get(discr_propertyname_py)) + path_to_item = kwargs.get("_path_to_item", ()) + disc_prop_value = kwargs.get(discr_propertyname_js, kwargs.get(discr_propertyname_py)) raise PineconeApiValueError( "Cannot deserialize input data due to invalid discriminator " "value. 
The OpenAPI document has no mapping for discriminator " - "property '%s'='%s' at path: %s" % - (discr_propertyname_js, disc_prop_value, path_to_item) + "property '%s'='%s' at path: %s" % (discr_propertyname_js, disc_prop_value, path_to_item) ) if new_cls in visited_composed_classes: @@ -282,13 +258,11 @@ def __new__(cls, *args, **kwargs): # Build a list containing all oneOf and anyOf descendants. oneof_anyof_classes = None if cls._composed_schemas is not None: - oneof_anyof_classes = ( - cls._composed_schemas.get('oneOf', ()) + - cls._composed_schemas.get('anyOf', ())) + oneof_anyof_classes = cls._composed_schemas.get("oneOf", ()) + cls._composed_schemas.get("anyOf", ()) oneof_anyof_child = new_cls in oneof_anyof_classes - kwargs['_visited_composed_classes'] = visited_composed_classes + (cls,) + kwargs["_visited_composed_classes"] = visited_composed_classes + (cls,) - if cls._composed_schemas.get('allOf') and oneof_anyof_child: + if cls._composed_schemas.get("allOf") and oneof_anyof_child: # Validate that we can make self because when we make the # new_cls it will not include the allOf validations in self self_inst = super(OpenApiModel, cls).__new__(cls) @@ -298,7 +272,6 @@ def __new__(cls, *args, **kwargs): new_inst.__init__(*args, **kwargs) return new_inst - @classmethod @convert_js_args_to_python_args def _new_from_openapi_data(cls, *args, **kwargs): @@ -317,12 +290,8 @@ def _new_from_openapi_data(cls, *args, **kwargs): oneof_instance = get_oneof_instance(cls, model_kwargs, kwargs, model_arg=arg) return oneof_instance - - visited_composed_classes = kwargs.get('_visited_composed_classes', ()) - if ( - cls.discriminator is None or - cls in visited_composed_classes - ): + visited_composed_classes = kwargs.get("_visited_composed_classes", ()) + if cls.discriminator is None or cls in visited_composed_classes: # Use case 1: this openapi schema (cls) does not have a discriminator # Use case 2: we have already visited this class before and are sure that we # want to 
instantiate it this time. We have visited this class deserializing @@ -352,28 +321,24 @@ def _new_from_openapi_data(cls, *args, **kwargs): discr_value = kwargs[discr_propertyname_py] else: # The input data does not contain the discriminator property. - path_to_item = kwargs.get('_path_to_item', ()) + path_to_item = kwargs.get("_path_to_item", ()) raise PineconeApiValueError( "Cannot deserialize input data due to missing discriminator. " - "The discriminator property '%s' is missing at path: %s" % - (discr_propertyname_js, path_to_item) + "The discriminator property '%s' is missing at path: %s" % (discr_propertyname_js, path_to_item) ) # Implementation note: the last argument to get_discriminator_class # is a list of visited classes. get_discriminator_class may recursively # call itself and update the list of visited classes, and the initial # value must be an empty list. Hence not using 'visited_composed_classes' - new_cls = get_discriminator_class( - cls, discr_propertyname_py, discr_value, []) + new_cls = get_discriminator_class(cls, discr_propertyname_py, discr_value, []) if new_cls is None: - path_to_item = kwargs.get('_path_to_item', ()) - disc_prop_value = kwargs.get( - discr_propertyname_js, kwargs.get(discr_propertyname_py)) + path_to_item = kwargs.get("_path_to_item", ()) + disc_prop_value = kwargs.get(discr_propertyname_js, kwargs.get(discr_propertyname_py)) raise PineconeApiValueError( "Cannot deserialize input data due to invalid discriminator " "value. The OpenAPI document has no mapping for discriminator " - "property '%s'='%s' at path: %s" % - (discr_propertyname_js, disc_prop_value, path_to_item) + "property '%s'='%s' at path: %s" % (discr_propertyname_js, disc_prop_value, path_to_item) ) if new_cls in visited_composed_classes: @@ -398,18 +363,15 @@ def _new_from_openapi_data(cls, *args, **kwargs): # Build a list containing all oneOf and anyOf descendants. 
oneof_anyof_classes = None if cls._composed_schemas is not None: - oneof_anyof_classes = ( - cls._composed_schemas.get('oneOf', ()) + - cls._composed_schemas.get('anyOf', ())) + oneof_anyof_classes = cls._composed_schemas.get("oneOf", ()) + cls._composed_schemas.get("anyOf", ()) oneof_anyof_child = new_cls in oneof_anyof_classes - kwargs['_visited_composed_classes'] = visited_composed_classes + (cls,) + kwargs["_visited_composed_classes"] = visited_composed_classes + (cls,) - if cls._composed_schemas.get('allOf') and oneof_anyof_child: + if cls._composed_schemas.get("allOf") and oneof_anyof_child: # Validate that we can make self because when we make the # new_cls it will not include the allOf validations in self self_inst = cls._from_openapi_data(*args, **kwargs) - new_inst = new_cls._new_from_openapi_data(*args, **kwargs) return new_inst @@ -431,7 +393,7 @@ def get(self, name, default=None): if name in self.required_properties: return self.__dict__[name] - return self.__dict__['_data_store'].get(name, default) + return self.__dict__["_data_store"].get(name, default) def __getitem__(self, name): """get the value of an attribute using square-bracket notation: `instance[attr]`""" @@ -439,9 +401,7 @@ def __getitem__(self, name): return self.get(name) raise PineconeApiAttributeError( - "{0} has no attribute '{1}'".format( - type(self).__name__, name), - [e for e in [self._path_to_item, name] if e] + "{0} has no attribute '{1}'".format(type(self).__name__, name), [e for e in [self._path_to_item, name] if e] ) def __contains__(self, name): @@ -449,7 +409,7 @@ def __contains__(self, name): if name in self.required_properties: return name in self.__dict__ - return name in self.__dict__['_data_store'] + return name in self.__dict__["_data_store"] def to_str(self): """Returns the string representation of the model""" @@ -460,8 +420,8 @@ def __eq__(self, other): if not isinstance(other, self.__class__): return False - this_val = self._data_store['value'] - that_val = 
other._data_store['value'] + this_val = self._data_store["value"] + that_val = other._data_store["value"] types = set() types.add(this_val.__class__) types.add(that_val.__class__) @@ -486,7 +446,7 @@ def get(self, name, default=None): if name in self.required_properties: return self.__dict__[name] - return self.__dict__['_data_store'].get(name, default) + return self.__dict__["_data_store"].get(name, default) def __getitem__(self, name): """get the value of an attribute using square-bracket notation: `instance[attr]`""" @@ -494,9 +454,7 @@ def __getitem__(self, name): return self.get(name) raise PineconeApiAttributeError( - "{0} has no attribute '{1}'".format( - type(self).__name__, name), - [e for e in [self._path_to_item, name] if e] + "{0} has no attribute '{1}'".format(type(self).__name__, name), [e for e in [self._path_to_item, name] if e] ) def __contains__(self, name): @@ -504,7 +462,7 @@ def __contains__(self, name): if name in self.required_properties: return name in self.__dict__ - return name in self.__dict__['_data_store'] + return name in self.__dict__["_data_store"] def to_dict(self): """Returns the model properties as a dict""" @@ -589,9 +547,8 @@ def __setitem__(self, name, value): """ if name not in self.openapi_types: raise PineconeApiAttributeError( - "{0} has no attribute '{1}'".format( - type(self).__name__, name), - [e for e in [self._path_to_item, name] if e] + "{0} has no attribute '{1}'".format(type(self).__name__, name), + [e for e in [self._path_to_item, name] if e], ) # attribute must be set on self and composed instances self.set_attribute(name, value) @@ -599,7 +556,7 @@ def __setitem__(self, name, value): setattr(model_instance, name, value) if name not in self._var_name_to_model_instances: # we assigned an additional property - self.__dict__['_var_name_to_model_instances'][name] = self._composed_instances + [self] + self.__dict__["_var_name_to_model_instances"][name] = self._composed_instances + [self] return None 
__unset_attribute_value__ = object() @@ -632,7 +589,7 @@ def get(self, name, default=None): "Values stored for property {0} in {1} differ when looking " "at self and self's composed instances. All values must be " "the same".format(name, type(self).__name__), - [e for e in [self._path_to_item, name] if e] + [e for e in [self._path_to_item, name] if e], ) def __getitem__(self, name): @@ -640,9 +597,8 @@ def __getitem__(self, name): value = self.get(name, self.__unset_attribute_value__) if value is self.__unset_attribute_value__: raise PineconeApiAttributeError( - "{0} has no attribute '{1}'".format( - type(self).__name__, name), - [e for e in [self._path_to_item, name] if e] + "{0} has no attribute '{1}'".format(type(self).__name__, name), + [e for e in [self._path_to_item, name] if e], ) return value @@ -652,8 +608,7 @@ def __contains__(self, name): if name in self.required_properties: return name in self.__dict__ - model_instances = self._var_name_to_model_instances.get( - name, self._additional_properties_model_instances) + model_instances = self._var_name_to_model_instances.get(name, self._additional_properties_model_instances) if model_instances: for model_instance in model_instances: @@ -692,7 +647,7 @@ def __eq__(self, other): ModelComposed: 0, ModelNormal: 1, ModelSimple: 2, - none_type: 3, # The type of 'None'. + none_type: 3, # The type of 'None'. list: 4, dict: 5, float: 6, @@ -701,7 +656,7 @@ def __eq__(self, other): # datetime: 9, # date: 10, str: 11, - file_type: 12, # 'file_type' is an alias for the built-in 'file' or 'io.IOBase' type. + file_type: 12, # 'file_type' is an alias for the built-in 'file' or 'io.IOBase' type. } # these are used to limit what type conversions we try to do @@ -710,7 +665,7 @@ def __eq__(self, other): UPCONVERSION_TYPE_PAIRS = ( # (str, datetime), # (str, date), - (int, float), # A float may be serialized as an integer, e.g. '3' is a valid serialized float. + (int, float), # A float may be serialized as an integer, e.g. 
'3' is a valid serialized float. (list, ModelComposed), (dict, ModelComposed), (str, ModelComposed), @@ -757,7 +712,7 @@ def __eq__(self, other): # (str, date), # (int, str), # (float, str), - (str, file_type) + (str, file_type), ), } @@ -814,41 +769,22 @@ def check_allowed_values(allowed_values, input_variable_path, input_values): are checking to see if they are in allowed_values """ these_allowed_values = list(allowed_values[input_variable_path].values()) - if (isinstance(input_values, list) - and not set(input_values).issubset( - set(these_allowed_values))): - invalid_values = ", ".join( - map(str, set(input_values) - set(these_allowed_values))), + if isinstance(input_values, list) and not set(input_values).issubset(set(these_allowed_values)): + invalid_values = (", ".join(map(str, set(input_values) - set(these_allowed_values))),) raise PineconeApiValueError( - "Invalid values for `%s` [%s], must be a subset of [%s]" % - ( - input_variable_path[0], - invalid_values, - ", ".join(map(str, these_allowed_values)) - ) + "Invalid values for `%s` [%s], must be a subset of [%s]" + % (input_variable_path[0], invalid_values, ", ".join(map(str, these_allowed_values))) ) - elif (isinstance(input_values, dict) - and not set( - input_values.keys()).issubset(set(these_allowed_values))): - invalid_values = ", ".join( - map(str, set(input_values.keys()) - set(these_allowed_values))) + elif isinstance(input_values, dict) and not set(input_values.keys()).issubset(set(these_allowed_values)): + invalid_values = ", ".join(map(str, set(input_values.keys()) - set(these_allowed_values))) raise PineconeApiValueError( - "Invalid keys in `%s` [%s], must be a subset of [%s]" % - ( - input_variable_path[0], - invalid_values, - ", ".join(map(str, these_allowed_values)) - ) + "Invalid keys in `%s` [%s], must be a subset of [%s]" + % (input_variable_path[0], invalid_values, ", ".join(map(str, these_allowed_values))) ) - elif (not isinstance(input_values, (list, dict)) - and input_values not in 
these_allowed_values): + elif not isinstance(input_values, (list, dict)) and input_values not in these_allowed_values: raise PineconeApiValueError( - "Invalid value for `%s` (%s), must be one of %s" % - ( - input_variable_path[0], - input_values, - these_allowed_values - ) + "Invalid value for `%s` (%s), must be one of %s" + % (input_variable_path[0], input_values, these_allowed_values) ) @@ -862,14 +798,14 @@ def is_json_validation_enabled(schema_keyword, configuration=None): configuration (Configuration): the configuration class. """ - return (configuration is None or - not hasattr(configuration, '_disabled_client_side_validations') or - schema_keyword not in configuration._disabled_client_side_validations) + return ( + configuration is None + or not hasattr(configuration, "_disabled_client_side_validations") + or schema_keyword not in configuration._disabled_client_side_validations + ) -def check_validations( - validations, input_variable_path, input_values, - configuration=None): +def check_validations(validations, input_variable_path, input_values, configuration=None): """Raises an exception if the input_values are invalid Args: @@ -884,66 +820,60 @@ def check_validations( return current_validations = validations[input_variable_path] - if (is_json_validation_enabled('multipleOf', configuration) and - 'multiple_of' in current_validations and - isinstance(input_values, (int, float)) and - not (float(input_values) / current_validations['multiple_of']).is_integer()): + if ( + is_json_validation_enabled("multipleOf", configuration) + and "multiple_of" in current_validations + and isinstance(input_values, (int, float)) + and not (float(input_values) / current_validations["multiple_of"]).is_integer() + ): # Note 'multipleOf' will be as good as the floating point arithmetic. 
raise PineconeApiValueError( "Invalid value for `%s`, value must be a multiple of " - "`%s`" % ( - input_variable_path[0], - current_validations['multiple_of'] - ) + "`%s`" % (input_variable_path[0], current_validations["multiple_of"]) ) - if (is_json_validation_enabled('maxLength', configuration) and - 'max_length' in current_validations and - len(input_values) > current_validations['max_length']): + if ( + is_json_validation_enabled("maxLength", configuration) + and "max_length" in current_validations + and len(input_values) > current_validations["max_length"] + ): raise PineconeApiValueError( "Invalid value for `%s`, length must be less than or equal to " - "`%s`" % ( - input_variable_path[0], - current_validations['max_length'] - ) + "`%s`" % (input_variable_path[0], current_validations["max_length"]) ) - if (is_json_validation_enabled('minLength', configuration) and - 'min_length' in current_validations and - len(input_values) < current_validations['min_length']): + if ( + is_json_validation_enabled("minLength", configuration) + and "min_length" in current_validations + and len(input_values) < current_validations["min_length"] + ): raise PineconeApiValueError( "Invalid value for `%s`, length must be greater than or equal to " - "`%s`" % ( - input_variable_path[0], - current_validations['min_length'] - ) + "`%s`" % (input_variable_path[0], current_validations["min_length"]) ) - if (is_json_validation_enabled('maxItems', configuration) and - 'max_items' in current_validations and - len(input_values) > current_validations['max_items']): + if ( + is_json_validation_enabled("maxItems", configuration) + and "max_items" in current_validations + and len(input_values) > current_validations["max_items"] + ): raise PineconeApiValueError( "Invalid value for `%s`, number of items must be less than or " - "equal to `%s`" % ( - input_variable_path[0], - current_validations['max_items'] - ) + "equal to `%s`" % (input_variable_path[0], current_validations["max_items"]) ) - if 
(is_json_validation_enabled('minItems', configuration) and - 'min_items' in current_validations and - len(input_values) < current_validations['min_items']): + if ( + is_json_validation_enabled("minItems", configuration) + and "min_items" in current_validations + and len(input_values) < current_validations["min_items"] + ): raise ValueError( "Invalid value for `%s`, number of items must be greater than or " - "equal to `%s`" % ( - input_variable_path[0], - current_validations['min_items'] - ) + "equal to `%s`" % (input_variable_path[0], current_validations["min_items"]) ) - items = ('exclusive_maximum', 'inclusive_maximum', 'exclusive_minimum', - 'inclusive_minimum') - if (any(item in current_validations for item in items)): + items = ("exclusive_maximum", "inclusive_maximum", "exclusive_minimum", "inclusive_minimum") + if any(item in current_validations for item in items): if isinstance(input_values, list): max_val = max(input_values) min_val = min(input_values) @@ -954,57 +884,55 @@ def check_validations( max_val = input_values min_val = input_values - if (is_json_validation_enabled('exclusiveMaximum', configuration) and - 'exclusive_maximum' in current_validations and - max_val >= current_validations['exclusive_maximum']): + if ( + is_json_validation_enabled("exclusiveMaximum", configuration) + and "exclusive_maximum" in current_validations + and max_val >= current_validations["exclusive_maximum"] + ): raise PineconeApiValueError( - "Invalid value for `%s`, must be a value less than `%s`" % ( - input_variable_path[0], - current_validations['exclusive_maximum'] - ) + "Invalid value for `%s`, must be a value less than `%s`" + % (input_variable_path[0], current_validations["exclusive_maximum"]) ) - if (is_json_validation_enabled('maximum', configuration) and - 'inclusive_maximum' in current_validations and - max_val > current_validations['inclusive_maximum']): + if ( + is_json_validation_enabled("maximum", configuration) + and "inclusive_maximum" in 
current_validations + and max_val > current_validations["inclusive_maximum"] + ): raise PineconeApiValueError( "Invalid value for `%s`, must be a value less than or equal to " - "`%s`" % ( - input_variable_path[0], - current_validations['inclusive_maximum'] - ) + "`%s`" % (input_variable_path[0], current_validations["inclusive_maximum"]) ) - if (is_json_validation_enabled('exclusiveMinimum', configuration) and - 'exclusive_minimum' in current_validations and - min_val <= current_validations['exclusive_minimum']): + if ( + is_json_validation_enabled("exclusiveMinimum", configuration) + and "exclusive_minimum" in current_validations + and min_val <= current_validations["exclusive_minimum"] + ): raise PineconeApiValueError( - "Invalid value for `%s`, must be a value greater than `%s`" % - ( - input_variable_path[0], - current_validations['exclusive_maximum'] - ) + "Invalid value for `%s`, must be a value greater than `%s`" + % (input_variable_path[0], current_validations["exclusive_maximum"]) ) - if (is_json_validation_enabled('minimum', configuration) and - 'inclusive_minimum' in current_validations and - min_val < current_validations['inclusive_minimum']): + if ( + is_json_validation_enabled("minimum", configuration) + and "inclusive_minimum" in current_validations + and min_val < current_validations["inclusive_minimum"] + ): raise PineconeApiValueError( "Invalid value for `%s`, must be a value greater than or equal " - "to `%s`" % ( - input_variable_path[0], - current_validations['inclusive_minimum'] - ) + "to `%s`" % (input_variable_path[0], current_validations["inclusive_minimum"]) ) - flags = current_validations.get('regex', {}).get('flags', 0) - if (is_json_validation_enabled('pattern', configuration) and - 'regex' in current_validations and - not re.search(current_validations['regex']['pattern'], - input_values, flags=flags)): + flags = current_validations.get("regex", {}).get("flags", 0) + if ( + is_json_validation_enabled("pattern", configuration) + and 
"regex" in current_validations + and not re.search(current_validations["regex"]["pattern"], input_values, flags=flags) + ): err_msg = r"Invalid value for `%s`, must match regular expression `%s`" % ( - input_variable_path[0], - current_validations['regex']['pattern'] - ) + input_variable_path[0], + current_validations["regex"]["pattern"], + ) if flags != 0: # Don't print the regex flags if the flags are not # specified in the OAS document. @@ -1029,28 +957,21 @@ def index_getter(class_or_instance): return COERCION_INDEX_BY_TYPE[list] elif isinstance(class_or_instance, dict): return COERCION_INDEX_BY_TYPE[dict] - elif (inspect.isclass(class_or_instance) - and issubclass(class_or_instance, ModelComposed)): + elif inspect.isclass(class_or_instance) and issubclass(class_or_instance, ModelComposed): return COERCION_INDEX_BY_TYPE[ModelComposed] - elif (inspect.isclass(class_or_instance) - and issubclass(class_or_instance, ModelNormal)): + elif inspect.isclass(class_or_instance) and issubclass(class_or_instance, ModelNormal): return COERCION_INDEX_BY_TYPE[ModelNormal] - elif (inspect.isclass(class_or_instance) - and issubclass(class_or_instance, ModelSimple)): + elif inspect.isclass(class_or_instance) and issubclass(class_or_instance, ModelSimple): return COERCION_INDEX_BY_TYPE[ModelSimple] elif class_or_instance in COERCION_INDEX_BY_TYPE: return COERCION_INDEX_BY_TYPE[class_or_instance] raise PineconeApiValueError("Unsupported type: %s" % class_or_instance) - sorted_types = sorted( - required_types, - key=lambda class_or_instance: index_getter(class_or_instance) - ) + sorted_types = sorted(required_types, key=lambda class_or_instance: index_getter(class_or_instance)) return sorted_types -def remove_uncoercible(required_types_classes, current_item, spec_property_naming, - must_convert=True): +def remove_uncoercible(required_types_classes, current_item, spec_property_naming, must_convert=True): """Only keeps the type conversions that are possible Args: @@ -1095,6 +1016,7 
@@ def remove_uncoercible(required_types_classes, current_item, spec_property_namin results_classes.append(required_type_class) return results_classes + def get_discriminated_classes(cls): """ Returns all the classes that a discriminator converts to @@ -1105,7 +1027,7 @@ def get_discriminated_classes(cls): if is_type_nullable(cls): possible_classes.append(cls) for discr_cls in cls.discriminator[key].values(): - if hasattr(discr_cls, 'discriminator') and discr_cls.discriminator is not None: + if hasattr(discr_cls, "discriminator") and discr_cls.discriminator is not None: possible_classes.extend(get_discriminated_classes(discr_cls)) else: possible_classes.append(discr_cls) @@ -1117,7 +1039,7 @@ def get_possible_classes(cls, from_server_context): possible_classes = [cls] if from_server_context: return possible_classes - if hasattr(cls, 'discriminator') and cls.discriminator is not None: + if hasattr(cls, "discriminator") and cls.discriminator is not None: possible_classes = [] possible_classes.extend(get_discriminated_classes(cls)) elif issubclass(cls, ModelComposed): @@ -1173,11 +1095,10 @@ def change_keys_js_to_python(input_dict, model_class): document). 
""" - if getattr(model_class, 'attribute_map', None) is None: + if getattr(model_class, "attribute_map", None) is None: return input_dict output_dict = {} - reversed_attr_map = {value: key for key, value in - model_class.attribute_map.items()} + reversed_attr_map = {value: key for key, value in model_class.attribute_map.items()} for javascript_key, value in input_dict.items(): python_key = reversed_attr_map.get(javascript_key) if python_key is None: @@ -1190,17 +1111,9 @@ def change_keys_js_to_python(input_dict, model_class): def get_type_error(var_value, path_to_item, valid_classes, key_type=False): error_msg = type_error_message( - var_name=path_to_item[-1], - var_value=var_value, - valid_classes=valid_classes, - key_type=key_type - ) - return PineconeApiTypeError( - error_msg, - path_to_item=path_to_item, - valid_classes=valid_classes, - key_type=key_type + var_name=path_to_item[-1], var_value=var_value, valid_classes=valid_classes, key_type=key_type ) + return PineconeApiTypeError(error_msg, path_to_item=path_to_item, valid_classes=valid_classes, key_type=key_type) def deserialize_primitive(data, klass, path_to_item): @@ -1243,21 +1156,17 @@ def deserialize_primitive(data, klass, path_to_item): if isinstance(data, str) and klass == float: if str(converted_value) != data: # '7' -> 7.0 -> '7.0' != '7' - raise ValueError('This is not a float') + raise ValueError("This is not a float") return converted_value except (OverflowError, ValueError) as ex: # parse can raise OverflowError raise PineconeApiValueError( - "{0}Failed to parse {1} as {2}".format( - additional_message, repr(data), klass.__name__ - ), - path_to_item=path_to_item + "{0}Failed to parse {1} as {2}".format(additional_message, repr(data), klass.__name__), + path_to_item=path_to_item, ) from ex -def get_discriminator_class(model_class, - discr_name, - discr_value, cls_visited): +def get_discriminator_class(model_class, discr_name, discr_value, cls_visited): """Returns the child class specified by the 
discriminator. Args: @@ -1293,22 +1202,21 @@ def get_discriminator_class(model_class, # Descendant example: mammal -> whale/zebra/Pig -> BasquePig/DanishPig # if we try to make BasquePig from mammal, we need to travel through # the oneOf descendant discriminators to find BasquePig - descendant_classes = model_class._composed_schemas.get('oneOf', ()) + \ - model_class._composed_schemas.get('anyOf', ()) - ancestor_classes = model_class._composed_schemas.get('allOf', ()) + descendant_classes = model_class._composed_schemas.get("oneOf", ()) + model_class._composed_schemas.get( + "anyOf", () + ) + ancestor_classes = model_class._composed_schemas.get("allOf", ()) possible_classes = descendant_classes + ancestor_classes for cls in possible_classes: # Check if the schema has inherited discriminators. - if hasattr(cls, 'discriminator') and cls.discriminator is not None: - used_model_class = get_discriminator_class( - cls, discr_name, discr_value, cls_visited) + if hasattr(cls, "discriminator") and cls.discriminator is not None: + used_model_class = get_discriminator_class(cls, discr_name, discr_value, cls_visited) if used_model_class is not None: return used_model_class return used_model_class -def deserialize_model(model_data, model_class, path_to_item, check_type, - configuration, spec_property_naming): +def deserialize_model(model_data, model_class, path_to_item, check_type, configuration, spec_property_naming): """Deserializes model_data to model instance. 
Args: @@ -1332,10 +1240,12 @@ def deserialize_model(model_data, model_class, path_to_item, check_type, PineconeApiKeyError """ - kw_args = dict(_check_type=check_type, - _path_to_item=path_to_item, - _configuration=configuration, - _spec_property_naming=spec_property_naming) + kw_args = dict( + _check_type=check_type, + _path_to_item=path_to_item, + _configuration=configuration, + _spec_property_naming=spec_property_naming, + ) if issubclass(model_class, ModelSimple): return model_class._new_from_openapi_data(model_data, **kw_args) @@ -1371,23 +1281,29 @@ def deserialize_file(response_data, configuration, content_disposition=None): os.remove(path) if content_disposition: - filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', - content_disposition).group(1) + filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', content_disposition).group(1) path = os.path.join(os.path.dirname(path), filename) with open(path, "wb") as f: if isinstance(response_data, str): # change str to bytes so we can write it - response_data = response_data.encode('utf-8') + response_data = response_data.encode("utf-8") f.write(response_data) f = open(path, "rb") return f -def attempt_convert_item(input_value, valid_classes, path_to_item, - configuration, spec_property_naming, key_type=False, - must_convert=False, check_type=True): +def attempt_convert_item( + input_value, + valid_classes, + path_to_item, + configuration, + spec_property_naming, + key_type=False, + must_convert=False, + check_type=True, +): """ Args: input_value (any): the data to convert @@ -1412,24 +1328,21 @@ def attempt_convert_item(input_value, valid_classes, path_to_item, PineconeApiKeyError """ valid_classes_ordered = order_response_types(valid_classes) - valid_classes_coercible = remove_uncoercible( - valid_classes_ordered, input_value, spec_property_naming) + valid_classes_coercible = remove_uncoercible(valid_classes_ordered, input_value, spec_property_naming) if not valid_classes_coercible or key_type: # we do 
not handle keytype errors, json will take care # of this for us if configuration is None or not configuration.discard_unknown_keys: - raise get_type_error(input_value, path_to_item, valid_classes, - key_type=key_type) + raise get_type_error(input_value, path_to_item, valid_classes, key_type=key_type) for valid_class in valid_classes_coercible: try: if issubclass(valid_class, OpenApiModel): - return deserialize_model(input_value, valid_class, - path_to_item, check_type, - configuration, spec_property_naming) + return deserialize_model( + input_value, valid_class, path_to_item, check_type, configuration, spec_property_naming + ) elif valid_class == file_type: return deserialize_file(input_value, configuration) - return deserialize_primitive(input_value, valid_class, - path_to_item) + return deserialize_primitive(input_value, valid_class, path_to_item) except (PineconeApiTypeError, PineconeApiValueError, PineconeApiKeyError) as conversion_exc: if must_convert: raise conversion_exc @@ -1461,10 +1374,12 @@ def is_type_nullable(input_type): return True if issubclass(input_type, ModelComposed): # If oneOf/anyOf, check if the 'null' type is one of the allowed types. 
- for t in input_type._composed_schemas.get('oneOf', ()): - if is_type_nullable(t): return True - for t in input_type._composed_schemas.get('anyOf', ()): - if is_type_nullable(t): return True + for t in input_type._composed_schemas.get("oneOf", ()): + if is_type_nullable(t): + return True + for t in input_type._composed_schemas.get("anyOf", ()): + if is_type_nullable(t): + return True return False @@ -1479,9 +1394,7 @@ def is_valid_type(input_class_simple, valid_classes): bool """ valid_type = input_class_simple in valid_classes - if not valid_type and ( - issubclass(input_class_simple, OpenApiModel) or - input_class_simple is none_type): + if not valid_type and (issubclass(input_class_simple, OpenApiModel) or input_class_simple is none_type): for valid_class in valid_classes: if input_class_simple is none_type and is_type_nullable(valid_class): # Schema is oneOf/anyOf and the 'null' type is one of the allowed types. @@ -1489,17 +1402,16 @@ def is_valid_type(input_class_simple, valid_classes): if not (issubclass(valid_class, OpenApiModel) and valid_class.discriminator): continue discr_propertyname_py = list(valid_class.discriminator.keys())[0] - discriminator_classes = ( - valid_class.discriminator[discr_propertyname_py].values() - ) + discriminator_classes = valid_class.discriminator[discr_propertyname_py].values() valid_type = is_valid_type(input_class_simple, discriminator_classes) if valid_type: return True return valid_type -def validate_and_convert_types(input_value, required_types_mixed, path_to_item, - spec_property_naming, _check_type, configuration=None): +def validate_and_convert_types( + input_value, required_types_mixed, path_to_item, spec_property_naming, _check_type, configuration=None +): """Raises a TypeError is there is a problem, otherwise returns value Args: @@ -1544,18 +1456,18 @@ def validate_and_convert_types(input_value, required_types_mixed, path_to_item, spec_property_naming, key_type=False, must_convert=True, - check_type=_check_type + 
check_type=_check_type, ) return converted_instance else: - raise get_type_error(input_value, path_to_item, valid_classes, - key_type=False) + raise get_type_error(input_value, path_to_item, valid_classes, key_type=False) # input_value's type is in valid_classes if len(valid_classes) > 1 and configuration: # there are valid classes which are not the current class valid_classes_coercible = remove_uncoercible( - valid_classes, input_value, spec_property_naming, must_convert=False) + valid_classes, input_value, spec_property_naming, must_convert=False + ) if valid_classes_coercible: converted_instance = attempt_convert_item( input_value, @@ -1565,7 +1477,7 @@ def validate_and_convert_types(input_value, required_types_mixed, path_to_item, spec_property_naming, key_type=False, must_convert=False, - check_type=_check_type + check_type=_check_type, ) return converted_instance @@ -1573,9 +1485,7 @@ def validate_and_convert_types(input_value, required_types_mixed, path_to_item, # all types are of the required types and there are no more inner # variables left to look at return input_value - inner_required_types = child_req_types_by_current_type.get( - type(input_value) - ) + inner_required_types = child_req_types_by_current_type.get(type(input_value)) if inner_required_types is None: # for this type, there are not more inner variables left to look at return input_value @@ -1592,7 +1502,7 @@ def validate_and_convert_types(input_value, required_types_mixed, path_to_item, inner_path, spec_property_naming, _check_type, - configuration=configuration + configuration=configuration, ) elif isinstance(input_value, dict): if input_value == {}: @@ -1602,15 +1512,14 @@ def validate_and_convert_types(input_value, required_types_mixed, path_to_item, inner_path = list(path_to_item) inner_path.append(inner_key) if get_simple_class(inner_key) != str: - raise get_type_error(inner_key, inner_path, valid_classes, - key_type=True) + raise get_type_error(inner_key, inner_path, valid_classes, 
key_type=True) input_value[inner_key] = validate_and_convert_types( inner_val, inner_required_types, inner_path, spec_property_naming, _check_type, - configuration=configuration + configuration=configuration, ) return input_value @@ -1646,29 +1555,33 @@ def model_to_dict(model_instance, serialize=True): except KeyError: used_fallback_python_attribute_names.add(attr) if isinstance(value, list): - if not value: - # empty list or None - result[attr] = value - else: - res = [] - for v in value: - if isinstance(v, PRIMITIVE_TYPES) or v is None: - res.append(v) - elif isinstance(v, ModelSimple): - res.append(v.value) - else: - res.append(model_to_dict(v, serialize=serialize)) - result[attr] = res + if not value: + # empty list or None + result[attr] = value + else: + res = [] + for v in value: + if isinstance(v, PRIMITIVE_TYPES) or v is None: + res.append(v) + elif isinstance(v, ModelSimple): + res.append(v.value) + else: + res.append(model_to_dict(v, serialize=serialize)) + result[attr] = res elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], - model_to_dict(item[1], serialize=serialize)) - if hasattr(item[1], '_data_store') else item, - value.items() - )) + result[attr] = dict( + map( + lambda item: ( + (item[0], model_to_dict(item[1], serialize=serialize)) + if hasattr(item[1], "_data_store") + else item + ), + value.items(), + ) + ) elif isinstance(value, ModelSimple): result[attr] = value.value - elif hasattr(value, '_data_store'): + elif hasattr(value, "_data_store"): result[attr] = model_to_dict(value, serialize=serialize) else: result[attr] = value @@ -1686,8 +1599,7 @@ def model_to_dict(model_instance, serialize=True): return result -def type_error_message(var_value=None, var_name=None, valid_classes=None, - key_type=None): +def type_error_message(var_value=None, var_name=None, valid_classes=None, key_type=None): """ Keyword Args: var_value (any): the variable which has the type_error @@ -1698,30 +1610,26 @@ def 
type_error_message(var_value=None, var_name=None, valid_classes=None, True if it is a key in a dict False if our item is an item in a list """ - key_or_value = 'value' + key_or_value = "value" if key_type: - key_or_value = 'key' + key_or_value = "key" valid_classes_phrase = get_valid_classes_phrase(valid_classes) - msg = ( - "Invalid type for variable '{0}'. Required {1} type {2} and " - "passed type was {3}".format( - var_name, - key_or_value, - valid_classes_phrase, - type(var_value).__name__, - ) + msg = "Invalid type for variable '{0}'. Required {1} type {2} and " "passed type was {3}".format( + var_name, + key_or_value, + valid_classes_phrase, + type(var_value).__name__, ) return msg def get_valid_classes_phrase(input_classes): - """Returns a string phrase describing what types are allowed - """ + """Returns a string phrase describing what types are allowed""" all_classes = list(input_classes) all_classes = sorted(all_classes, key=lambda cls: cls.__name__) all_class_names = [cls.__name__ for cls in all_classes] if len(all_class_names) == 1: - return 'is {0}'.format(all_class_names[0]) + return "is {0}".format(all_class_names[0]) return "is one of [{0}]".format(", ".join(all_class_names)) @@ -1743,8 +1651,7 @@ def get_allof_instances(self, model_args, constant_args): composed_instances (list) """ composed_instances = [] - for allof_class in self._composed_schemas['allOf']: - + for allof_class in self._composed_schemas["allOf"]: try: allof_instance = allof_class(**model_args, **constant_args) composed_instances.append(allof_instance) @@ -1752,12 +1659,7 @@ def get_allof_instances(self, model_args, constant_args): raise PineconeApiValueError( "Invalid inputs given to generate an instance of '%s'. The " "input data was invalid for the allOf schema '%s' in the composed " - "schema '%s'. Error=%s" % ( - allof_class.__name__, - allof_class.__name__, - self.__class__.__name__, - str(ex) - ) + "schema '%s'. 
Error=%s" % (allof_class.__name__, allof_class.__name__, self.__class__.__name__, str(ex)) ) from ex return composed_instances @@ -1790,13 +1692,13 @@ def get_oneof_instance(cls, model_kwargs, constant_kwargs, model_arg=None): Returns oneof_instance (instance) """ - if len(cls._composed_schemas['oneOf']) == 0: + if len(cls._composed_schemas["oneOf"]) == 0: return None oneof_instances = [] # Iterate over each oneOf schema and determine if the input data # matches the oneOf schemas. - for oneof_class in cls._composed_schemas['oneOf']: + for oneof_class in cls._composed_schemas["oneOf"]: # The composed oneOf schema allows the 'null' type and the input data # is the null value. This is a OAS >= 3.1 feature. if oneof_class is none_type: @@ -1816,10 +1718,10 @@ def get_oneof_instance(cls, model_kwargs, constant_kwargs, model_arg=None): oneof_instance = validate_and_convert_types( model_arg, (oneof_class,), - constant_kwargs['_path_to_item'], - constant_kwargs['_spec_property_naming'], - constant_kwargs['_check_type'], - configuration=constant_kwargs['_configuration'] + constant_kwargs["_path_to_item"], + constant_kwargs["_spec_property_naming"], + constant_kwargs["_check_type"], + configuration=constant_kwargs["_configuration"], ) oneof_instances.append(oneof_instance) except Exception: @@ -1827,14 +1729,12 @@ def get_oneof_instance(cls, model_kwargs, constant_kwargs, model_arg=None): if len(oneof_instances) == 0: raise PineconeApiValueError( "Invalid inputs given to generate an instance of %s. None " - "of the oneOf schemas matched the input data." % - cls.__name__ + "of the oneOf schemas matched the input data." % cls.__name__ ) elif len(oneof_instances) > 1: raise PineconeApiValueError( "Invalid inputs given to generate an instance of %s. Multiple " - "oneOf schemas matched the inputs, but a max of one is allowed." % - cls.__name__ + "oneOf schemas matched the inputs, but a max of one is allowed." 
% cls.__name__ ) return oneof_instances[0] @@ -1854,10 +1754,10 @@ def get_anyof_instances(self, model_args, constant_args): anyof_instances (list) """ anyof_instances = [] - if len(self._composed_schemas['anyOf']) == 0: + if len(self._composed_schemas["anyOf"]) == 0: return anyof_instances - for anyof_class in self._composed_schemas['anyOf']: + for anyof_class in self._composed_schemas["anyOf"]: # The composed oneOf schema allows the 'null' type and the input data # is the null value. This is a OAS >= 3.1 feature. if anyof_class is none_type: @@ -1873,8 +1773,7 @@ def get_anyof_instances(self, model_args, constant_args): if len(anyof_instances) == 0: raise PineconeApiValueError( "Invalid inputs given to generate an instance of %s. None of the " - "anyOf schemas matched the inputs." % - self.__class__.__name__ + "anyOf schemas matched the inputs." % self.__class__.__name__ ) return anyof_instances @@ -1888,7 +1787,7 @@ def get_discarded_args(self, composed_instances, model_args): # arguments passed to self were already converted to python names # before __init__ was called for instance in composed_instances: - if instance.__class__ in self._composed_schemas['allOf']: + if instance.__class__ in self._composed_schemas["allOf"]: try: keys = instance.to_dict().keys() discarded_keys = model_args - keys @@ -1983,9 +1882,4 @@ def validate_get_composed_info(constant_args, model_args, self): if prop_name not in discarded_args: var_name_to_model_instances[prop_name] = [self] + composed_instances - return [ - composed_instances, - var_name_to_model_instances, - additional_properties_model_instances, - discarded_args - ] + return [composed_instances, var_name_to_model_instances, additional_properties_model_instances, discarded_args] diff --git a/pinecone/core/client/rest.py b/pinecone/core/client/rest.py index ed704eae..28fd1ff9 100644 --- a/pinecone/core/client/rest.py +++ b/pinecone/core/client/rest.py @@ -8,7 +8,6 @@ Generated by: https://openapi-generator.tech """ - import 
io import json import logging @@ -19,25 +18,32 @@ import urllib3 -from pinecone.core.client.exceptions import PineconeApiException, UnauthorizedException, ForbiddenException, NotFoundException, ServiceException, PineconeApiValueError +from pinecone.core.client.exceptions import ( + PineconeApiException, + UnauthorizedException, + ForbiddenException, + NotFoundException, + ServiceException, + PineconeApiValueError, +) class bcolors: - HEADER = '\033[95m' - OKBLUE = '\033[94m' - OKCYAN = '\033[96m' - OKGREEN = '\033[92m' - WARNING = '\033[93m' - FAIL = '\033[91m' - ENDC = '\033[0m' - BOLD = '\033[1m' - UNDERLINE = '\033[4m' + HEADER = "\033[95m" + OKBLUE = "\033[94m" + OKCYAN = "\033[96m" + OKGREEN = "\033[92m" + WARNING = "\033[93m" + FAIL = "\033[91m" + ENDC = "\033[0m" + BOLD = "\033[1m" + UNDERLINE = "\033[4m" + logger = logging.getLogger(__name__) class RESTResponse(io.IOBase): - def __init__(self, resp): self.urllib3_response = resp self.status = resp.status @@ -54,7 +60,6 @@ def getheader(self, name, default=None): class RESTClientObject(object): - def __init__(self, configuration, pools_size=4, maxsize=None): # urllib3.PoolManager will pass all kw parameters to connectionpool # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 @@ -70,13 +75,13 @@ def __init__(self, configuration, pools_size=4, maxsize=None): addition_pool_args = {} if configuration.assert_hostname is not None: - addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 + addition_pool_args["assert_hostname"] = configuration.assert_hostname # noqa: E501 if configuration.retries is not None: - addition_pool_args['retries'] = configuration.retries + addition_pool_args["retries"] = configuration.retries if configuration.socket_options is not None: - addition_pool_args['socket_options'] = configuration.socket_options + addition_pool_args["socket_options"] = configuration.socket_options if maxsize is 
None: if configuration.connection_pool_maxsize is not None: @@ -95,7 +100,7 @@ def __init__(self, configuration, pools_size=4, maxsize=None): key_file=configuration.key_file, proxy_url=configuration.proxy, proxy_headers=configuration.proxy_headers, - **addition_pool_args + **addition_pool_args, ) else: self.pool_manager = urllib3.PoolManager( @@ -105,12 +110,20 @@ def __init__(self, configuration, pools_size=4, maxsize=None): ca_certs=configuration.ssl_ca_cert, cert_file=configuration.cert_file, key_file=configuration.key_file, - **addition_pool_args + **addition_pool_args, ) - def request(self, method, url, query_params=None, headers=None, - body=None, post_params=None, _preload_content=True, - _request_timeout=None): + def request( + self, + method, + url, + query_params=None, + headers=None, + body=None, + post_params=None, + _preload_content=True, + _request_timeout=None, + ): """Perform requests. :param method: http request method @@ -130,27 +143,35 @@ def request(self, method, url, query_params=None, headers=None, (connection, read) timeouts. 
""" method = method.upper() - assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', - 'PATCH', 'OPTIONS'] + assert method in ["GET", "HEAD", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"] - if os.environ.get('PINECONE_DEBUG_CURL'): - formatted_headers = ' '.join(["-H '{0}: {1}'".format(k, v) - for k, v in headers.items()]) + if os.environ.get("PINECONE_DEBUG_CURL"): + formatted_headers = " ".join(["-H '{0}: {1}'".format(k, v) for k, v in headers.items()]) formatted_query = urlencode(query_params) if formatted_query: - formatted_url = f'{url}?{formatted_query}' + formatted_url = f"{url}?{formatted_query}" else: formatted_url = url if body is None: - print(bcolors.OKBLUE + "curl -X {method} '{url}' {formatted_headers}".format(method=method, url=formatted_url, formatted_headers=formatted_headers) + bcolors.ENDC) + print( + bcolors.OKBLUE + + "curl -X {method} '{url}' {formatted_headers}".format( + method=method, url=formatted_url, formatted_headers=formatted_headers + ) + + bcolors.ENDC + ) else: formatted_body = json.dumps(body) - print(bcolors.OKBLUE + "curl -X {method} '{url}' {formatted_headers} -d '{data}'".format(method=method, url=formatted_url, formatted_headers=formatted_headers, data=formatted_body) + bcolors.ENDC) + print( + bcolors.OKBLUE + + "curl -X {method} '{url}' {formatted_headers} -d '{data}'".format( + method=method, url=formatted_url, formatted_headers=formatted_headers, data=formatted_body + ) + + bcolors.ENDC + ) if post_params and body: - raise PineconeApiValueError( - "body parameter cannot be used with post_params parameter." 
- ) + raise PineconeApiValueError("body parameter cannot be used with post_params parameter.") post_params = post_params or {} headers = headers or {} @@ -159,60 +180,66 @@ def request(self, method, url, query_params=None, headers=None, if _request_timeout: if isinstance(_request_timeout, (int, float)): # noqa: E501,F821 timeout = urllib3.Timeout(total=_request_timeout) - elif (isinstance(_request_timeout, tuple) and - len(_request_timeout) == 2): - timeout = urllib3.Timeout( - connect=_request_timeout[0], read=_request_timeout[1]) + elif isinstance(_request_timeout, tuple) and len(_request_timeout) == 2: + timeout = urllib3.Timeout(connect=_request_timeout[0], read=_request_timeout[1]) try: # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` - if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: + if method in ["POST", "PUT", "PATCH", "OPTIONS", "DELETE"]: # Only set a default Content-Type for POST, PUT, PATCH and OPTIONS requests - if (method != 'DELETE') and ('Content-Type' not in headers): - headers['Content-Type'] = 'application/json' + if (method != "DELETE") and ("Content-Type" not in headers): + headers["Content-Type"] = "application/json" if query_params: - url += '?' + urlencode(query_params) - if ('Content-Type' not in headers) or (re.search('json', headers['Content-Type'], re.IGNORECASE)): + url += "?" 
+ urlencode(query_params) + if ("Content-Type" not in headers) or (re.search("json", headers["Content-Type"], re.IGNORECASE)): request_body = None if body is not None: request_body = json.dumps(body) r = self.pool_manager.request( - method, url, + method, + url, body=request_body, preload_content=_preload_content, timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 + headers=headers, + ) + elif headers["Content-Type"] == "application/x-www-form-urlencoded": # noqa: E501 r = self.pool_manager.request( - method, url, + method, + url, fields=post_params, encode_multipart=False, preload_content=_preload_content, timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'multipart/form-data': + headers=headers, + ) + elif headers["Content-Type"] == "multipart/form-data": # must del headers['Content-Type'], or the correct # Content-Type which generated by urllib3 will be # overwritten. - del headers['Content-Type'] + del headers["Content-Type"] r = self.pool_manager.request( - method, url, + method, + url, fields=post_params, encode_multipart=True, preload_content=_preload_content, timeout=timeout, - headers=headers) + headers=headers, + ) # Pass a `string` parameter directly in the body to support # other content types than Json when `body` argument is # provided in serialized form elif isinstance(body, str) or isinstance(body, bytes): request_body = body r = self.pool_manager.request( - method, url, + method, + url, body=request_body, preload_content=_preload_content, timeout=timeout, - headers=headers) + headers=headers, + ) else: # Cannot generate the request from given parameters msg = """Cannot prepare a request message for provided @@ -221,27 +248,24 @@ def request(self, method, url, query_params=None, headers=None, raise PineconeApiException(status=0, reason=msg) # For `GET`, `HEAD` else: - r = self.pool_manager.request(method, url, - fields=query_params, - 
preload_content=_preload_content, - timeout=timeout, - headers=headers) + r = self.pool_manager.request( + method, url, fields=query_params, preload_content=_preload_content, timeout=timeout, headers=headers + ) except urllib3.exceptions.SSLError as e: msg = "{0}\n{1}".format(type(e).__name__, str(e)) raise PineconeApiException(status=0, reason=msg) - if os.environ.get('PINECONE_DEBUG_CURL'): + if os.environ.get("PINECONE_DEBUG_CURL"): o = RESTResponse(r) if o.status <= 300: - print(bcolors.OKGREEN + o.data.decode('utf-8') + bcolors.ENDC) + print(bcolors.OKGREEN + o.data.decode("utf-8") + bcolors.ENDC) else: - print(bcolors.FAIL + o.data.decode('utf-8') + bcolors.ENDC) - + print(bcolors.FAIL + o.data.decode("utf-8") + bcolors.ENDC) if _preload_content: r = RESTResponse(r) - + # log response body logger.debug("response body: %s", r.data) @@ -262,67 +286,117 @@ def request(self, method, url, query_params=None, headers=None, return r - def GET(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("GET", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def HEAD(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("HEAD", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def OPTIONS(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("OPTIONS", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def DELETE(self, url, headers=None, query_params=None, body=None, - _preload_content=True, _request_timeout=None): - return self.request("DELETE", url, - headers=headers, - 
query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def POST(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("POST", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def PUT(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PUT", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def PATCH(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PATCH", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) + def GET(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None): + return self.request( + "GET", + url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params, + ) + + def HEAD(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None): + return self.request( + "HEAD", + url, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + query_params=query_params, + ) + + def OPTIONS( + self, + url, + headers=None, + query_params=None, + post_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "OPTIONS", + url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + 
body=body, + ) + + def DELETE(self, url, headers=None, query_params=None, body=None, _preload_content=True, _request_timeout=None): + return self.request( + "DELETE", + url, + headers=headers, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) + + def POST( + self, + url, + headers=None, + query_params=None, + post_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "POST", + url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) + + def PUT( + self, + url, + headers=None, + query_params=None, + post_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "PUT", + url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) + + def PATCH( + self, + url, + headers=None, + query_params=None, + post_params=None, + body=None, + _preload_content=True, + _request_timeout=None, + ): + return self.request( + "PATCH", + url, + headers=headers, + query_params=query_params, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body, + ) diff --git a/pinecone/core/grpc/protos/vector_service_pb2.py b/pinecone/core/grpc/protos/vector_service_pb2.py index 01997dfa..378065d6 100644 --- a/pinecone/core/grpc/protos/vector_service_pb2.py +++ b/pinecone/core/grpc/protos/vector_service_pb2.py @@ -7,6 +7,7 @@ from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -18,190 +19,264 @@ from protoc_gen_openapiv2.options import annotations_pb2 
as protoc__gen__openapiv2_dot_options_dot_annotations__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x14vector_service.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\x80\x01\n\x0cSparseValues\x12\x36\n\x07indices\x18\x01 \x03(\rB%\x92\x41\x1eJ\x16[1, 312, 822, 14, 980]x\xe8\x07\x80\x01\x01\xe2\x41\x01\x02\x12\x38\n\x06values\x18\x02 \x03(\x02\x42(\x92\x41!J\x19[0.1, 0.2, 0.3, 0.4, 0.5]x\xe8\x07\x80\x01\x01\xe2\x41\x01\x02\"\xff\x01\n\x06Vector\x12-\n\x02id\x18\x01 \x01(\tB!\x92\x41\x1aJ\x12\"example-vector-1\"x\x80\x04\x80\x01\x01\xe2\x41\x01\x02\x12H\n\x06values\x18\x02 \x03(\x02\x42\x38\x92\x41\x31J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\xa0\x9c\x01\x80\x01\x01\xe2\x41\x01\x02\x12$\n\rsparse_values\x18\x04 \x01(\x0b\x32\r.SparseValues\x12V\n\x08metadata\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructB+\x92\x41(J&{\"genre\": \"documentary\", \"year\": 2019}\"\x94\x02\n\x0cScoredVector\x12-\n\x02id\x18\x01 \x01(\tB!\x92\x41\x1aJ\x12\"example-vector-1\"x\x80\x04\x80\x01\x01\xe2\x41\x01\x02\x12\x18\n\x05score\x18\x02 \x01(\x02\x42\t\x92\x41\x06J\x04\x30.08\x12=\n\x06values\x18\x03 \x03(\x02\x42-\x92\x41*J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]\x12$\n\rsparse_values\x18\x05 \x01(\x0b\x32\r.SparseValues\x12V\n\x08metadata\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructB+\x92\x41(J&{\"genre\": \"documentary\", \"year\": 2019}\"\x89\x01\n\x0cRequestUnion\x12 \n\x06upsert\x18\x01 \x01(\x0b\x32\x0e.UpsertRequestH\x00\x12 \n\x06\x64\x65lete\x18\x02 \x01(\x0b\x32\x0e.DeleteRequestH\x00\x12 \n\x06update\x18\x03 \x01(\x0b\x32\x0e.UpdateRequestH\x00\x42\x13\n\x11RequestUnionInner\"e\n\rUpsertRequest\x12\'\n\x07vectors\x18\x01 \x03(\x0b\x32\x07.VectorB\r\x92\x41\x06x\xe8\x07\x80\x01\x01\xe2\x41\x01\x02\x12+\n\tnamespace\x18\x02 
\x01(\tB\x18\x92\x41\x15J\x13\"example-namespace\"\"1\n\x0eUpsertResponse\x12\x1f\n\x0eupserted_count\x18\x01 \x01(\rB\x07\x92\x41\x04J\x02\x31\x30\"\xb6\x01\n\rDeleteRequest\x12(\n\x03ids\x18\x01 \x03(\tB\x1b\x92\x41\x18J\x10[\"id-0\", \"id-1\"]x\xe8\x07\x80\x01\x01\x12%\n\ndelete_all\x18\x02 \x01(\x08\x42\x11\x92\x41\x0e:\x05\x66\x61lseJ\x05\x66\x61lse\x12+\n\tnamespace\x18\x03 \x01(\tB\x18\x92\x41\x15J\x13\"example-namespace\"\x12\'\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x17.google.protobuf.Struct\"\x10\n\x0e\x44\x65leteResponse\"i\n\x0c\x46\x65tchRequest\x12,\n\x03ids\x18\x01 \x03(\tB\x1f\x92\x41\x18J\x10[\"id-0\", \"id-1\"]x\xe8\x07\x80\x01\x01\xe2\x41\x01\x02\x12+\n\tnamespace\x18\x02 \x01(\tB\x18\x92\x41\x15J\x13\"example-namespace\"\"\xe1\x01\n\rFetchResponse\x12,\n\x07vectors\x18\x01 \x03(\x0b\x32\x1b.FetchResponse.VectorsEntry\x12+\n\tnamespace\x18\x02 \x01(\tB\x18\x92\x41\x15J\x13\"example-namespace\"\x12\x32\n\x05usage\x18\x03 \x01(\x0b\x32\x06.UsageB\x16\x92\x41\x13J\x11{\"read_units\": 5}H\x00\x88\x01\x01\x1a\x37\n\x0cVectorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x16\n\x05value\x18\x02 \x01(\x0b\x32\x07.Vector:\x02\x38\x01\x42\x08\n\x06_usage\"\xf8\x01\n\x0bListRequest\x12,\n\x06prefix\x18\x01 \x01(\tB\x17\x92\x41\x14J\x0c\"document1#\"x\xe8\x07\x80\x01\x01H\x00\x88\x01\x01\x12 \n\x05limit\x18\x02 \x01(\rB\x0c\x92\x41\t:\x03\x31\x30\x30J\x02\x31\x32H\x01\x88\x01\x01\x12\x42\n\x10pagination_token\x18\x03 \x01(\tB#\x92\x41 J\x1e\"Tm90aGluZyB0byBzZWUgaGVyZQo=\"H\x02\x88\x01\x01\x12+\n\tnamespace\x18\x04 \x01(\tB\x18\x92\x41\x15J\x13\"example-namespace\"B\t\n\x07_prefixB\x08\n\x06_limitB\x13\n\x11_pagination_token\"?\n\nPagination\x12\x31\n\x04next\x18\x01 \x01(\tB#\x92\x41 J\x1e\"Tm90aGluZyB0byBzZWUgaGVyZQo=\"\",\n\x08ListItem\x12 \n\x02id\x18\x01 \x01(\tB\x14\x92\x41\x11J\x0f\"document1#abb\"\"\x83\x02\n\x0cListResponse\x12S\n\x07vectors\x18\x01 \x03(\x0b\x32\t.ListItemB7\x92\x41\x34J2[{\"id\": \"document1#abb\"}, {\"id\": 
\"document1#abc\"}]\x12$\n\npagination\x18\x02 \x01(\x0b\x32\x0b.PaginationH\x00\x88\x01\x01\x12+\n\tnamespace\x18\x03 \x01(\tB\x18\x92\x41\x15J\x13\"example-namespace\"\x12\x32\n\x05usage\x18\x04 \x01(\x0b\x32\x06.UsageB\x16\x92\x41\x13J\x11{\"read_units\": 1}H\x01\x88\x01\x01\x42\r\n\x0b_paginationB\x08\n\x06_usage\"\xd1\x02\n\x0bQueryVector\x12H\n\x06values\x18\x01 \x03(\x02\x42\x38\x92\x41\x31J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\xa0\x9c\x01\x80\x01\x01\xe2\x41\x01\x02\x12$\n\rsparse_values\x18\x05 \x01(\x0b\x32\r.SparseValues\x12(\n\x05top_k\x18\x02 \x01(\rB\x19\x92\x41\x16J\x02\x31\x30Y\x00\x00\x00\x00\x00\x88\xc3@i\x00\x00\x00\x00\x00\x00\xf0?\x12+\n\tnamespace\x18\x03 \x01(\tB\x18\x92\x41\x15J\x13\"example-namespace\"\x12{\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructBR\x92\x41OJM{\"genre\": {\"$in\": [\"comedy\", \"documentary\", \"drama\"]}, \"year\": {\"$eq\": 2019}}\"\xfb\x03\n\x0cQueryRequest\x12+\n\tnamespace\x18\x01 \x01(\tB\x18\x92\x41\x15J\x13\"example-namespace\"\x12,\n\x05top_k\x18\x02 \x01(\rB\x1d\x92\x41\x16J\x02\x31\x30Y\x00\x00\x00\x00\x00\x88\xc3@i\x00\x00\x00\x00\x00\x00\xf0?\xe2\x41\x01\x02\x12{\n\x06\x66ilter\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructBR\x92\x41OJM{\"genre\": {\"$in\": [\"comedy\", \"documentary\", \"drama\"]}, \"year\": {\"$eq\": 2019}}\x12(\n\x0einclude_values\x18\x04 \x01(\x08\x42\x10\x92\x41\r:\x05\x66\x61lseJ\x04true\x12*\n\x10include_metadata\x18\x05 \x01(\x08\x42\x10\x92\x41\r:\x05\x66\x61lseJ\x04true\x12)\n\x07queries\x18\x06 \x03(\x0b\x32\x0c.QueryVectorB\n\x18\x01\x92\x41\x05x\n\x80\x01\x01\x12\x44\n\x06vector\x18\x07 \x03(\x02\x42\x34\x92\x41\x31J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\xa0\x9c\x01\x80\x01\x01\x12$\n\rsparse_vector\x18\t \x01(\x0b\x32\r.SparseValues\x12&\n\x02id\x18\x08 \x01(\tB\x1a\x92\x41\x17J\x12\"example-vector-1\"x\x80\x04\"a\n\x12SingleQueryResults\x12\x1e\n\x07matches\x18\x01 \x03(\x0b\x32\r.ScoredVector\x12+\n\tnamespace\x18\x02 
\x01(\tB\x18\x92\x41\x15J\x13\"example-namespace\"\"\xaa\x01\n\rQueryResponse\x12(\n\x07results\x18\x01 \x03(\x0b\x32\x13.SingleQueryResultsB\x02\x18\x01\x12\x1e\n\x07matches\x18\x02 \x03(\x0b\x32\r.ScoredVector\x12\x11\n\tnamespace\x18\x03 \x01(\t\x12\x32\n\x05usage\x18\x04 \x01(\x0b\x32\x06.UsageB\x16\x92\x41\x13J\x11{\"read_units\": 5}H\x00\x88\x01\x01\x42\x08\n\x06_usage\"7\n\x05Usage\x12\x1f\n\nread_units\x18\x01 \x01(\rB\x06\x92\x41\x03J\x01\x35H\x00\x88\x01\x01\x42\r\n\x0b_read_units\"\xb3\x02\n\rUpdateRequest\x12-\n\x02id\x18\x01 \x01(\tB!\x92\x41\x1aJ\x12\"example-vector-1\"x\x80\x04\x80\x01\x01\xe2\x41\x01\x02\x12\x44\n\x06values\x18\x02 \x03(\x02\x42\x34\x92\x41\x31J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\xa0\x9c\x01\x80\x01\x01\x12$\n\rsparse_values\x18\x05 \x01(\x0b\x32\r.SparseValues\x12Z\n\x0cset_metadata\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructB+\x92\x41(J&{\"genre\": \"documentary\", \"year\": 2019}\x12+\n\tnamespace\x18\x04 \x01(\tB\x18\x92\x41\x15J\x13\"example-namespace\"\"\x10\n\x0eUpdateResponse\"D\n\x19\x44\x65scribeIndexStatsRequest\x12\'\n\x06\x66ilter\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct\"4\n\x10NamespaceSummary\x12 \n\x0cvector_count\x18\x01 \x01(\rB\n\x92\x41\x07J\x05\x35\x30\x30\x30\x30\"\x9a\x03\n\x1a\x44\x65scribeIndexStatsResponse\x12?\n\nnamespaces\x18\x01 \x03(\x0b\x32+.DescribeIndexStatsResponse.NamespacesEntry\x12\x1c\n\tdimension\x18\x02 \x01(\rB\t\x92\x41\x06J\x04\x31\x30\x32\x34\x12 \n\x0eindex_fullness\x18\x03 \x01(\x02\x42\x08\x92\x41\x05J\x03\x30.4\x12&\n\x12total_vector_count\x18\x04 \x01(\rB\n\x92\x41\x07J\x05\x38\x30\x30\x30\x30\x1a\x44\n\x0fNamespacesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.NamespaceSummary:\x02\x38\x01:\x8c\x01\x92\x41\x88\x01\x32\x85\x01{\"namespaces\": {\"\": {\"vectorCount\": 50000}, \"example-namespace-2\": {\"vectorCount\": 30000}}, \"dimension\": 1024, \"index_fullness\": 
0.4}2\x95\x06\n\rVectorService\x12\x63\n\x06Upsert\x12\x0e.UpsertRequest\x1a\x0f.UpsertResponse\"8\x92\x41\x1b\n\x11Vector Operations*\x06upsert\x82\xd3\xe4\x93\x02\x14\"\x0f/vectors/upsert:\x01*\x12v\n\x06\x44\x65lete\x12\x0e.DeleteRequest\x1a\x0f.DeleteResponse\"K\x92\x41\x1b\n\x11Vector Operations*\x06\x64\x65lete\x82\xd3\xe4\x93\x02\'\"\x0f/vectors/delete:\x01*Z\x11*\x0f/vectors/delete\x12[\n\x05\x46\x65tch\x12\r.FetchRequest\x1a\x0e.FetchResponse\"3\x92\x41\x1a\n\x11Vector Operations*\x05\x66\x65tch\x82\xd3\xe4\x93\x02\x10\x12\x0e/vectors/fetch\x12V\n\x04List\x12\x0c.ListRequest\x1a\r.ListResponse\"1\x92\x41\x19\n\x11Vector Operations*\x04list\x82\xd3\xe4\x93\x02\x0f\x12\r/vectors/list\x12V\n\x05Query\x12\r.QueryRequest\x1a\x0e.QueryResponse\".\x92\x41\x1a\n\x11Vector Operations*\x05query\x82\xd3\xe4\x93\x02\x0b\"\x06/query:\x01*\x12\x63\n\x06Update\x12\x0e.UpdateRequest\x1a\x0f.UpdateResponse\"8\x92\x41\x1b\n\x11Vector Operations*\x06update\x82\xd3\xe4\x93\x02\x14\"\x0f/vectors/update:\x01*\x12\xb4\x01\n\x12\x44\x65scribeIndexStats\x12\x1a.DescribeIndexStatsRequest\x1a\x1b.DescribeIndexStatsResponse\"e\x92\x41)\n\x11Vector Operations*\x14\x64\x65scribe_index_stats\x82\xd3\xe4\x93\x02\x33\"\x15/describe_index_stats:\x01*Z\x17\x12\x15/describe_index_statsB\x8f\x03\n\x11io.pinecone.protoP\x01Z+github.com/pinecone-io/go-pinecone/pinecone\x92\x41\xc9\x02\x12K\n\x0cPinecone API\";\n\x0fPinecone.io Ops\x12\x13https://pinecone.io\x1a\x13support@pinecone.io\x1a\x0c{index_host}*\x01\x02\x32\x10\x61pplication/json:\x10\x61pplication/jsonZx\nv\n\nApiKeyAuth\x12h\x08\x02\x12YAn API Key is required to call Pinecone APIs. 
Get yours at https://www.pinecone.io/start/\x1a\x07\x41pi-Key \x02\x62\x10\n\x0e\n\nApiKeyAuth\x12\x00r9\n\x19More Pinecone.io API docs\x12\x1chttps://www.pinecone.io/docsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x14vector_service.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.protoc-gen-openapiv2/options/annotations.proto"\x80\x01\n\x0cSparseValues\x12\x36\n\x07indices\x18\x01 \x03(\rB%\x92\x41\x1eJ\x16[1, 312, 822, 14, 980]x\xe8\x07\x80\x01\x01\xe2\x41\x01\x02\x12\x38\n\x06values\x18\x02 \x03(\x02\x42(\x92\x41!J\x19[0.1, 0.2, 0.3, 0.4, 0.5]x\xe8\x07\x80\x01\x01\xe2\x41\x01\x02"\xff\x01\n\x06Vector\x12-\n\x02id\x18\x01 \x01(\tB!\x92\x41\x1aJ\x12"example-vector-1"x\x80\x04\x80\x01\x01\xe2\x41\x01\x02\x12H\n\x06values\x18\x02 \x03(\x02\x42\x38\x92\x41\x31J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\xa0\x9c\x01\x80\x01\x01\xe2\x41\x01\x02\x12$\n\rsparse_values\x18\x04 \x01(\x0b\x32\r.SparseValues\x12V\n\x08metadata\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructB+\x92\x41(J&{"genre": "documentary", "year": 2019}"\x94\x02\n\x0cScoredVector\x12-\n\x02id\x18\x01 \x01(\tB!\x92\x41\x1aJ\x12"example-vector-1"x\x80\x04\x80\x01\x01\xe2\x41\x01\x02\x12\x18\n\x05score\x18\x02 \x01(\x02\x42\t\x92\x41\x06J\x04\x30.08\x12=\n\x06values\x18\x03 \x03(\x02\x42-\x92\x41*J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]\x12$\n\rsparse_values\x18\x05 \x01(\x0b\x32\r.SparseValues\x12V\n\x08metadata\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructB+\x92\x41(J&{"genre": "documentary", "year": 2019}"\x89\x01\n\x0cRequestUnion\x12 \n\x06upsert\x18\x01 \x01(\x0b\x32\x0e.UpsertRequestH\x00\x12 \n\x06\x64\x65lete\x18\x02 \x01(\x0b\x32\x0e.DeleteRequestH\x00\x12 \n\x06update\x18\x03 \x01(\x0b\x32\x0e.UpdateRequestH\x00\x42\x13\n\x11RequestUnionInner"e\n\rUpsertRequest\x12\'\n\x07vectors\x18\x01 
\x03(\x0b\x32\x07.VectorB\r\x92\x41\x06x\xe8\x07\x80\x01\x01\xe2\x41\x01\x02\x12+\n\tnamespace\x18\x02 \x01(\tB\x18\x92\x41\x15J\x13"example-namespace""1\n\x0eUpsertResponse\x12\x1f\n\x0eupserted_count\x18\x01 \x01(\rB\x07\x92\x41\x04J\x02\x31\x30"\xb6\x01\n\rDeleteRequest\x12(\n\x03ids\x18\x01 \x03(\tB\x1b\x92\x41\x18J\x10["id-0", "id-1"]x\xe8\x07\x80\x01\x01\x12%\n\ndelete_all\x18\x02 \x01(\x08\x42\x11\x92\x41\x0e:\x05\x66\x61lseJ\x05\x66\x61lse\x12+\n\tnamespace\x18\x03 \x01(\tB\x18\x92\x41\x15J\x13"example-namespace"\x12\'\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x17.google.protobuf.Struct"\x10\n\x0e\x44\x65leteResponse"i\n\x0c\x46\x65tchRequest\x12,\n\x03ids\x18\x01 \x03(\tB\x1f\x92\x41\x18J\x10["id-0", "id-1"]x\xe8\x07\x80\x01\x01\xe2\x41\x01\x02\x12+\n\tnamespace\x18\x02 \x01(\tB\x18\x92\x41\x15J\x13"example-namespace""\xe1\x01\n\rFetchResponse\x12,\n\x07vectors\x18\x01 \x03(\x0b\x32\x1b.FetchResponse.VectorsEntry\x12+\n\tnamespace\x18\x02 \x01(\tB\x18\x92\x41\x15J\x13"example-namespace"\x12\x32\n\x05usage\x18\x03 \x01(\x0b\x32\x06.UsageB\x16\x92\x41\x13J\x11{"read_units": 5}H\x00\x88\x01\x01\x1a\x37\n\x0cVectorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x16\n\x05value\x18\x02 \x01(\x0b\x32\x07.Vector:\x02\x38\x01\x42\x08\n\x06_usage"\xf8\x01\n\x0bListRequest\x12,\n\x06prefix\x18\x01 \x01(\tB\x17\x92\x41\x14J\x0c"document1#"x\xe8\x07\x80\x01\x01H\x00\x88\x01\x01\x12 \n\x05limit\x18\x02 \x01(\rB\x0c\x92\x41\t:\x03\x31\x30\x30J\x02\x31\x32H\x01\x88\x01\x01\x12\x42\n\x10pagination_token\x18\x03 \x01(\tB#\x92\x41 J\x1e"Tm90aGluZyB0byBzZWUgaGVyZQo="H\x02\x88\x01\x01\x12+\n\tnamespace\x18\x04 \x01(\tB\x18\x92\x41\x15J\x13"example-namespace"B\t\n\x07_prefixB\x08\n\x06_limitB\x13\n\x11_pagination_token"?\n\nPagination\x12\x31\n\x04next\x18\x01 \x01(\tB#\x92\x41 J\x1e"Tm90aGluZyB0byBzZWUgaGVyZQo="",\n\x08ListItem\x12 \n\x02id\x18\x01 \x01(\tB\x14\x92\x41\x11J\x0f"document1#abb""\x83\x02\n\x0cListResponse\x12S\n\x07vectors\x18\x01 
\x03(\x0b\x32\t.ListItemB7\x92\x41\x34J2[{"id": "document1#abb"}, {"id": "document1#abc"}]\x12$\n\npagination\x18\x02 \x01(\x0b\x32\x0b.PaginationH\x00\x88\x01\x01\x12+\n\tnamespace\x18\x03 \x01(\tB\x18\x92\x41\x15J\x13"example-namespace"\x12\x32\n\x05usage\x18\x04 \x01(\x0b\x32\x06.UsageB\x16\x92\x41\x13J\x11{"read_units": 1}H\x01\x88\x01\x01\x42\r\n\x0b_paginationB\x08\n\x06_usage"\xd1\x02\n\x0bQueryVector\x12H\n\x06values\x18\x01 \x03(\x02\x42\x38\x92\x41\x31J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\xa0\x9c\x01\x80\x01\x01\xe2\x41\x01\x02\x12$\n\rsparse_values\x18\x05 \x01(\x0b\x32\r.SparseValues\x12(\n\x05top_k\x18\x02 \x01(\rB\x19\x92\x41\x16J\x02\x31\x30Y\x00\x00\x00\x00\x00\x88\xc3@i\x00\x00\x00\x00\x00\x00\xf0?\x12+\n\tnamespace\x18\x03 \x01(\tB\x18\x92\x41\x15J\x13"example-namespace"\x12{\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructBR\x92\x41OJM{"genre": {"$in": ["comedy", "documentary", "drama"]}, "year": {"$eq": 2019}}"\xfb\x03\n\x0cQueryRequest\x12+\n\tnamespace\x18\x01 \x01(\tB\x18\x92\x41\x15J\x13"example-namespace"\x12,\n\x05top_k\x18\x02 \x01(\rB\x1d\x92\x41\x16J\x02\x31\x30Y\x00\x00\x00\x00\x00\x88\xc3@i\x00\x00\x00\x00\x00\x00\xf0?\xe2\x41\x01\x02\x12{\n\x06\x66ilter\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructBR\x92\x41OJM{"genre": {"$in": ["comedy", "documentary", "drama"]}, "year": {"$eq": 2019}}\x12(\n\x0einclude_values\x18\x04 \x01(\x08\x42\x10\x92\x41\r:\x05\x66\x61lseJ\x04true\x12*\n\x10include_metadata\x18\x05 \x01(\x08\x42\x10\x92\x41\r:\x05\x66\x61lseJ\x04true\x12)\n\x07queries\x18\x06 \x03(\x0b\x32\x0c.QueryVectorB\n\x18\x01\x92\x41\x05x\n\x80\x01\x01\x12\x44\n\x06vector\x18\x07 \x03(\x02\x42\x34\x92\x41\x31J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\xa0\x9c\x01\x80\x01\x01\x12$\n\rsparse_vector\x18\t \x01(\x0b\x32\r.SparseValues\x12&\n\x02id\x18\x08 \x01(\tB\x1a\x92\x41\x17J\x12"example-vector-1"x\x80\x04"a\n\x12SingleQueryResults\x12\x1e\n\x07matches\x18\x01 
\x03(\x0b\x32\r.ScoredVector\x12+\n\tnamespace\x18\x02 \x01(\tB\x18\x92\x41\x15J\x13"example-namespace""\xaa\x01\n\rQueryResponse\x12(\n\x07results\x18\x01 \x03(\x0b\x32\x13.SingleQueryResultsB\x02\x18\x01\x12\x1e\n\x07matches\x18\x02 \x03(\x0b\x32\r.ScoredVector\x12\x11\n\tnamespace\x18\x03 \x01(\t\x12\x32\n\x05usage\x18\x04 \x01(\x0b\x32\x06.UsageB\x16\x92\x41\x13J\x11{"read_units": 5}H\x00\x88\x01\x01\x42\x08\n\x06_usage"7\n\x05Usage\x12\x1f\n\nread_units\x18\x01 \x01(\rB\x06\x92\x41\x03J\x01\x35H\x00\x88\x01\x01\x42\r\n\x0b_read_units"\xb3\x02\n\rUpdateRequest\x12-\n\x02id\x18\x01 \x01(\tB!\x92\x41\x1aJ\x12"example-vector-1"x\x80\x04\x80\x01\x01\xe2\x41\x01\x02\x12\x44\n\x06values\x18\x02 \x03(\x02\x42\x34\x92\x41\x31J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\xa0\x9c\x01\x80\x01\x01\x12$\n\rsparse_values\x18\x05 \x01(\x0b\x32\r.SparseValues\x12Z\n\x0cset_metadata\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructB+\x92\x41(J&{"genre": "documentary", "year": 2019}\x12+\n\tnamespace\x18\x04 \x01(\tB\x18\x92\x41\x15J\x13"example-namespace""\x10\n\x0eUpdateResponse"D\n\x19\x44\x65scribeIndexStatsRequest\x12\'\n\x06\x66ilter\x18\x01 \x01(\x0b\x32\x17.google.protobuf.Struct"4\n\x10NamespaceSummary\x12 \n\x0cvector_count\x18\x01 \x01(\rB\n\x92\x41\x07J\x05\x35\x30\x30\x30\x30"\x9a\x03\n\x1a\x44\x65scribeIndexStatsResponse\x12?\n\nnamespaces\x18\x01 \x03(\x0b\x32+.DescribeIndexStatsResponse.NamespacesEntry\x12\x1c\n\tdimension\x18\x02 \x01(\rB\t\x92\x41\x06J\x04\x31\x30\x32\x34\x12 \n\x0eindex_fullness\x18\x03 \x01(\x02\x42\x08\x92\x41\x05J\x03\x30.4\x12&\n\x12total_vector_count\x18\x04 \x01(\rB\n\x92\x41\x07J\x05\x38\x30\x30\x30\x30\x1a\x44\n\x0fNamespacesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12 \n\x05value\x18\x02 \x01(\x0b\x32\x11.NamespaceSummary:\x02\x38\x01:\x8c\x01\x92\x41\x88\x01\x32\x85\x01{"namespaces": {"": {"vectorCount": 50000}, "example-namespace-2": {"vectorCount": 30000}}, "dimension": 1024, "index_fullness": 
0.4}2\x95\x06\n\rVectorService\x12\x63\n\x06Upsert\x12\x0e.UpsertRequest\x1a\x0f.UpsertResponse"8\x92\x41\x1b\n\x11Vector Operations*\x06upsert\x82\xd3\xe4\x93\x02\x14"\x0f/vectors/upsert:\x01*\x12v\n\x06\x44\x65lete\x12\x0e.DeleteRequest\x1a\x0f.DeleteResponse"K\x92\x41\x1b\n\x11Vector Operations*\x06\x64\x65lete\x82\xd3\xe4\x93\x02\'"\x0f/vectors/delete:\x01*Z\x11*\x0f/vectors/delete\x12[\n\x05\x46\x65tch\x12\r.FetchRequest\x1a\x0e.FetchResponse"3\x92\x41\x1a\n\x11Vector Operations*\x05\x66\x65tch\x82\xd3\xe4\x93\x02\x10\x12\x0e/vectors/fetch\x12V\n\x04List\x12\x0c.ListRequest\x1a\r.ListResponse"1\x92\x41\x19\n\x11Vector Operations*\x04list\x82\xd3\xe4\x93\x02\x0f\x12\r/vectors/list\x12V\n\x05Query\x12\r.QueryRequest\x1a\x0e.QueryResponse".\x92\x41\x1a\n\x11Vector Operations*\x05query\x82\xd3\xe4\x93\x02\x0b"\x06/query:\x01*\x12\x63\n\x06Update\x12\x0e.UpdateRequest\x1a\x0f.UpdateResponse"8\x92\x41\x1b\n\x11Vector Operations*\x06update\x82\xd3\xe4\x93\x02\x14"\x0f/vectors/update:\x01*\x12\xb4\x01\n\x12\x44\x65scribeIndexStats\x12\x1a.DescribeIndexStatsRequest\x1a\x1b.DescribeIndexStatsResponse"e\x92\x41)\n\x11Vector Operations*\x14\x64\x65scribe_index_stats\x82\xd3\xe4\x93\x02\x33"\x15/describe_index_stats:\x01*Z\x17\x12\x15/describe_index_statsB\x8f\x03\n\x11io.pinecone.protoP\x01Z+github.com/pinecone-io/go-pinecone/pinecone\x92\x41\xc9\x02\x12K\n\x0cPinecone API";\n\x0fPinecone.io Ops\x12\x13https://pinecone.io\x1a\x13support@pinecone.io\x1a\x0c{index_host}*\x01\x02\x32\x10\x61pplication/json:\x10\x61pplication/jsonZx\nv\n\nApiKeyAuth\x12h\x08\x02\x12YAn API Key is required to call Pinecone APIs. 
Get yours at https://www.pinecone.io/start/\x1a\x07\x41pi-Key \x02\x62\x10\n\x0e\n\nApiKeyAuth\x12\x00r9\n\x19More Pinecone.io API docs\x12\x1chttps://www.pinecone.io/docsb\x06proto3' +) _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'vector_service_pb2', _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "vector_service_pb2", _globals) if _descriptor._USE_C_DESCRIPTORS == False: - _globals['DESCRIPTOR']._options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\021io.pinecone.protoP\001Z+github.com/pinecone-io/go-pinecone/pinecone\222A\311\002\022K\n\014Pinecone API\";\n\017Pinecone.io Ops\022\023https://pinecone.io\032\023support@pinecone.io\032\014{index_host}*\001\0022\020application/json:\020application/jsonZx\nv\n\nApiKeyAuth\022h\010\002\022YAn API Key is required to call Pinecone APIs. Get yours at https://www.pinecone.io/start/\032\007Api-Key \002b\020\n\016\n\nApiKeyAuth\022\000r9\n\031More Pinecone.io API docs\022\034https://www.pinecone.io/docs' - _globals['_SPARSEVALUES'].fields_by_name['indices']._options = None - _globals['_SPARSEVALUES'].fields_by_name['indices']._serialized_options = b'\222A\036J\026[1, 312, 822, 14, 980]x\350\007\200\001\001\342A\001\002' - _globals['_SPARSEVALUES'].fields_by_name['values']._options = None - _globals['_SPARSEVALUES'].fields_by_name['values']._serialized_options = b'\222A!J\031[0.1, 0.2, 0.3, 0.4, 0.5]x\350\007\200\001\001\342A\001\002' - _globals['_VECTOR'].fields_by_name['id']._options = None - _globals['_VECTOR'].fields_by_name['id']._serialized_options = b'\222A\032J\022\"example-vector-1\"x\200\004\200\001\001\342A\001\002' - _globals['_VECTOR'].fields_by_name['values']._options = None - _globals['_VECTOR'].fields_by_name['values']._serialized_options = b'\222A1J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\240\234\001\200\001\001\342A\001\002' - _globals['_VECTOR'].fields_by_name['metadata']._options = 
None - _globals['_VECTOR'].fields_by_name['metadata']._serialized_options = b'\222A(J&{\"genre\": \"documentary\", \"year\": 2019}' - _globals['_SCOREDVECTOR'].fields_by_name['id']._options = None - _globals['_SCOREDVECTOR'].fields_by_name['id']._serialized_options = b'\222A\032J\022\"example-vector-1\"x\200\004\200\001\001\342A\001\002' - _globals['_SCOREDVECTOR'].fields_by_name['score']._options = None - _globals['_SCOREDVECTOR'].fields_by_name['score']._serialized_options = b'\222A\006J\0040.08' - _globals['_SCOREDVECTOR'].fields_by_name['values']._options = None - _globals['_SCOREDVECTOR'].fields_by_name['values']._serialized_options = b'\222A*J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]' - _globals['_SCOREDVECTOR'].fields_by_name['metadata']._options = None - _globals['_SCOREDVECTOR'].fields_by_name['metadata']._serialized_options = b'\222A(J&{\"genre\": \"documentary\", \"year\": 2019}' - _globals['_UPSERTREQUEST'].fields_by_name['vectors']._options = None - _globals['_UPSERTREQUEST'].fields_by_name['vectors']._serialized_options = b'\222A\006x\350\007\200\001\001\342A\001\002' - _globals['_UPSERTREQUEST'].fields_by_name['namespace']._options = None - _globals['_UPSERTREQUEST'].fields_by_name['namespace']._serialized_options = b'\222A\025J\023\"example-namespace\"' - _globals['_UPSERTRESPONSE'].fields_by_name['upserted_count']._options = None - _globals['_UPSERTRESPONSE'].fields_by_name['upserted_count']._serialized_options = b'\222A\004J\00210' - _globals['_DELETEREQUEST'].fields_by_name['ids']._options = None - _globals['_DELETEREQUEST'].fields_by_name['ids']._serialized_options = b'\222A\030J\020[\"id-0\", \"id-1\"]x\350\007\200\001\001' - _globals['_DELETEREQUEST'].fields_by_name['delete_all']._options = None - _globals['_DELETEREQUEST'].fields_by_name['delete_all']._serialized_options = b'\222A\016:\005falseJ\005false' - _globals['_DELETEREQUEST'].fields_by_name['namespace']._options = None - 
_globals['_DELETEREQUEST'].fields_by_name['namespace']._serialized_options = b'\222A\025J\023\"example-namespace\"' - _globals['_FETCHREQUEST'].fields_by_name['ids']._options = None - _globals['_FETCHREQUEST'].fields_by_name['ids']._serialized_options = b'\222A\030J\020[\"id-0\", \"id-1\"]x\350\007\200\001\001\342A\001\002' - _globals['_FETCHREQUEST'].fields_by_name['namespace']._options = None - _globals['_FETCHREQUEST'].fields_by_name['namespace']._serialized_options = b'\222A\025J\023\"example-namespace\"' - _globals['_FETCHRESPONSE_VECTORSENTRY']._options = None - _globals['_FETCHRESPONSE_VECTORSENTRY']._serialized_options = b'8\001' - _globals['_FETCHRESPONSE'].fields_by_name['namespace']._options = None - _globals['_FETCHRESPONSE'].fields_by_name['namespace']._serialized_options = b'\222A\025J\023\"example-namespace\"' - _globals['_FETCHRESPONSE'].fields_by_name['usage']._options = None - _globals['_FETCHRESPONSE'].fields_by_name['usage']._serialized_options = b'\222A\023J\021{\"read_units\": 5}' - _globals['_LISTREQUEST'].fields_by_name['prefix']._options = None - _globals['_LISTREQUEST'].fields_by_name['prefix']._serialized_options = b'\222A\024J\014\"document1#\"x\350\007\200\001\001' - _globals['_LISTREQUEST'].fields_by_name['limit']._options = None - _globals['_LISTREQUEST'].fields_by_name['limit']._serialized_options = b'\222A\t:\003100J\00212' - _globals['_LISTREQUEST'].fields_by_name['pagination_token']._options = None - _globals['_LISTREQUEST'].fields_by_name['pagination_token']._serialized_options = b'\222A J\036\"Tm90aGluZyB0byBzZWUgaGVyZQo=\"' - _globals['_LISTREQUEST'].fields_by_name['namespace']._options = None - _globals['_LISTREQUEST'].fields_by_name['namespace']._serialized_options = b'\222A\025J\023\"example-namespace\"' - _globals['_PAGINATION'].fields_by_name['next']._options = None - _globals['_PAGINATION'].fields_by_name['next']._serialized_options = b'\222A J\036\"Tm90aGluZyB0byBzZWUgaGVyZQo=\"' - 
_globals['_LISTITEM'].fields_by_name['id']._options = None - _globals['_LISTITEM'].fields_by_name['id']._serialized_options = b'\222A\021J\017\"document1#abb\"' - _globals['_LISTRESPONSE'].fields_by_name['vectors']._options = None - _globals['_LISTRESPONSE'].fields_by_name['vectors']._serialized_options = b'\222A4J2[{\"id\": \"document1#abb\"}, {\"id\": \"document1#abc\"}]' - _globals['_LISTRESPONSE'].fields_by_name['namespace']._options = None - _globals['_LISTRESPONSE'].fields_by_name['namespace']._serialized_options = b'\222A\025J\023\"example-namespace\"' - _globals['_LISTRESPONSE'].fields_by_name['usage']._options = None - _globals['_LISTRESPONSE'].fields_by_name['usage']._serialized_options = b'\222A\023J\021{\"read_units\": 1}' - _globals['_QUERYVECTOR'].fields_by_name['values']._options = None - _globals['_QUERYVECTOR'].fields_by_name['values']._serialized_options = b'\222A1J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\240\234\001\200\001\001\342A\001\002' - _globals['_QUERYVECTOR'].fields_by_name['top_k']._options = None - _globals['_QUERYVECTOR'].fields_by_name['top_k']._serialized_options = b'\222A\026J\00210Y\000\000\000\000\000\210\303@i\000\000\000\000\000\000\360?' 
- _globals['_QUERYVECTOR'].fields_by_name['namespace']._options = None - _globals['_QUERYVECTOR'].fields_by_name['namespace']._serialized_options = b'\222A\025J\023\"example-namespace\"' - _globals['_QUERYVECTOR'].fields_by_name['filter']._options = None - _globals['_QUERYVECTOR'].fields_by_name['filter']._serialized_options = b'\222AOJM{\"genre\": {\"$in\": [\"comedy\", \"documentary\", \"drama\"]}, \"year\": {\"$eq\": 2019}}' - _globals['_QUERYREQUEST'].fields_by_name['namespace']._options = None - _globals['_QUERYREQUEST'].fields_by_name['namespace']._serialized_options = b'\222A\025J\023\"example-namespace\"' - _globals['_QUERYREQUEST'].fields_by_name['top_k']._options = None - _globals['_QUERYREQUEST'].fields_by_name['top_k']._serialized_options = b'\222A\026J\00210Y\000\000\000\000\000\210\303@i\000\000\000\000\000\000\360?\342A\001\002' - _globals['_QUERYREQUEST'].fields_by_name['filter']._options = None - _globals['_QUERYREQUEST'].fields_by_name['filter']._serialized_options = b'\222AOJM{\"genre\": {\"$in\": [\"comedy\", \"documentary\", \"drama\"]}, \"year\": {\"$eq\": 2019}}' - _globals['_QUERYREQUEST'].fields_by_name['include_values']._options = None - _globals['_QUERYREQUEST'].fields_by_name['include_values']._serialized_options = b'\222A\r:\005falseJ\004true' - _globals['_QUERYREQUEST'].fields_by_name['include_metadata']._options = None - _globals['_QUERYREQUEST'].fields_by_name['include_metadata']._serialized_options = b'\222A\r:\005falseJ\004true' - _globals['_QUERYREQUEST'].fields_by_name['queries']._options = None - _globals['_QUERYREQUEST'].fields_by_name['queries']._serialized_options = b'\030\001\222A\005x\n\200\001\001' - _globals['_QUERYREQUEST'].fields_by_name['vector']._options = None - _globals['_QUERYREQUEST'].fields_by_name['vector']._serialized_options = b'\222A1J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\240\234\001\200\001\001' - _globals['_QUERYREQUEST'].fields_by_name['id']._options = None - 
_globals['_QUERYREQUEST'].fields_by_name['id']._serialized_options = b'\222A\027J\022\"example-vector-1\"x\200\004' - _globals['_SINGLEQUERYRESULTS'].fields_by_name['namespace']._options = None - _globals['_SINGLEQUERYRESULTS'].fields_by_name['namespace']._serialized_options = b'\222A\025J\023\"example-namespace\"' - _globals['_QUERYRESPONSE'].fields_by_name['results']._options = None - _globals['_QUERYRESPONSE'].fields_by_name['results']._serialized_options = b'\030\001' - _globals['_QUERYRESPONSE'].fields_by_name['usage']._options = None - _globals['_QUERYRESPONSE'].fields_by_name['usage']._serialized_options = b'\222A\023J\021{\"read_units\": 5}' - _globals['_USAGE'].fields_by_name['read_units']._options = None - _globals['_USAGE'].fields_by_name['read_units']._serialized_options = b'\222A\003J\0015' - _globals['_UPDATEREQUEST'].fields_by_name['id']._options = None - _globals['_UPDATEREQUEST'].fields_by_name['id']._serialized_options = b'\222A\032J\022\"example-vector-1\"x\200\004\200\001\001\342A\001\002' - _globals['_UPDATEREQUEST'].fields_by_name['values']._options = None - _globals['_UPDATEREQUEST'].fields_by_name['values']._serialized_options = b'\222A1J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\240\234\001\200\001\001' - _globals['_UPDATEREQUEST'].fields_by_name['set_metadata']._options = None - _globals['_UPDATEREQUEST'].fields_by_name['set_metadata']._serialized_options = b'\222A(J&{\"genre\": \"documentary\", \"year\": 2019}' - _globals['_UPDATEREQUEST'].fields_by_name['namespace']._options = None - _globals['_UPDATEREQUEST'].fields_by_name['namespace']._serialized_options = b'\222A\025J\023\"example-namespace\"' - _globals['_NAMESPACESUMMARY'].fields_by_name['vector_count']._options = None - _globals['_NAMESPACESUMMARY'].fields_by_name['vector_count']._serialized_options = b'\222A\007J\00550000' - _globals['_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY']._options = None - _globals['_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY']._serialized_options = 
b'8\001' - _globals['_DESCRIBEINDEXSTATSRESPONSE'].fields_by_name['dimension']._options = None - _globals['_DESCRIBEINDEXSTATSRESPONSE'].fields_by_name['dimension']._serialized_options = b'\222A\006J\0041024' - _globals['_DESCRIBEINDEXSTATSRESPONSE'].fields_by_name['index_fullness']._options = None - _globals['_DESCRIBEINDEXSTATSRESPONSE'].fields_by_name['index_fullness']._serialized_options = b'\222A\005J\0030.4' - _globals['_DESCRIBEINDEXSTATSRESPONSE'].fields_by_name['total_vector_count']._options = None - _globals['_DESCRIBEINDEXSTATSRESPONSE'].fields_by_name['total_vector_count']._serialized_options = b'\222A\007J\00580000' - _globals['_DESCRIBEINDEXSTATSRESPONSE']._options = None - _globals['_DESCRIBEINDEXSTATSRESPONSE']._serialized_options = b'\222A\210\0012\205\001{\"namespaces\": {\"\": {\"vectorCount\": 50000}, \"example-namespace-2\": {\"vectorCount\": 30000}}, \"dimension\": 1024, \"index_fullness\": 0.4}' - _globals['_VECTORSERVICE'].methods_by_name['Upsert']._options = None - _globals['_VECTORSERVICE'].methods_by_name['Upsert']._serialized_options = b'\222A\033\n\021Vector Operations*\006upsert\202\323\344\223\002\024\"\017/vectors/upsert:\001*' - _globals['_VECTORSERVICE'].methods_by_name['Delete']._options = None - _globals['_VECTORSERVICE'].methods_by_name['Delete']._serialized_options = b'\222A\033\n\021Vector Operations*\006delete\202\323\344\223\002\'\"\017/vectors/delete:\001*Z\021*\017/vectors/delete' - _globals['_VECTORSERVICE'].methods_by_name['Fetch']._options = None - _globals['_VECTORSERVICE'].methods_by_name['Fetch']._serialized_options = b'\222A\032\n\021Vector Operations*\005fetch\202\323\344\223\002\020\022\016/vectors/fetch' - _globals['_VECTORSERVICE'].methods_by_name['List']._options = None - _globals['_VECTORSERVICE'].methods_by_name['List']._serialized_options = b'\222A\031\n\021Vector Operations*\004list\202\323\344\223\002\017\022\r/vectors/list' - _globals['_VECTORSERVICE'].methods_by_name['Query']._options = None - 
_globals['_VECTORSERVICE'].methods_by_name['Query']._serialized_options = b'\222A\032\n\021Vector Operations*\005query\202\323\344\223\002\013\"\006/query:\001*' - _globals['_VECTORSERVICE'].methods_by_name['Update']._options = None - _globals['_VECTORSERVICE'].methods_by_name['Update']._serialized_options = b'\222A\033\n\021Vector Operations*\006update\202\323\344\223\002\024\"\017/vectors/update:\001*' - _globals['_VECTORSERVICE'].methods_by_name['DescribeIndexStats']._options = None - _globals['_VECTORSERVICE'].methods_by_name['DescribeIndexStats']._serialized_options = b'\222A)\n\021Vector Operations*\024describe_index_stats\202\323\344\223\0023\"\025/describe_index_stats:\001*Z\027\022\025/describe_index_stats' - _globals['_SPARSEVALUES']._serialized_start=166 - _globals['_SPARSEVALUES']._serialized_end=294 - _globals['_VECTOR']._serialized_start=297 - _globals['_VECTOR']._serialized_end=552 - _globals['_SCOREDVECTOR']._serialized_start=555 - _globals['_SCOREDVECTOR']._serialized_end=831 - _globals['_REQUESTUNION']._serialized_start=834 - _globals['_REQUESTUNION']._serialized_end=971 - _globals['_UPSERTREQUEST']._serialized_start=973 - _globals['_UPSERTREQUEST']._serialized_end=1074 - _globals['_UPSERTRESPONSE']._serialized_start=1076 - _globals['_UPSERTRESPONSE']._serialized_end=1125 - _globals['_DELETEREQUEST']._serialized_start=1128 - _globals['_DELETEREQUEST']._serialized_end=1310 - _globals['_DELETERESPONSE']._serialized_start=1312 - _globals['_DELETERESPONSE']._serialized_end=1328 - _globals['_FETCHREQUEST']._serialized_start=1330 - _globals['_FETCHREQUEST']._serialized_end=1435 - _globals['_FETCHRESPONSE']._serialized_start=1438 - _globals['_FETCHRESPONSE']._serialized_end=1663 - _globals['_FETCHRESPONSE_VECTORSENTRY']._serialized_start=1598 - _globals['_FETCHRESPONSE_VECTORSENTRY']._serialized_end=1653 - _globals['_LISTREQUEST']._serialized_start=1666 - _globals['_LISTREQUEST']._serialized_end=1914 - _globals['_PAGINATION']._serialized_start=1916 - 
_globals['_PAGINATION']._serialized_end=1979 - _globals['_LISTITEM']._serialized_start=1981 - _globals['_LISTITEM']._serialized_end=2025 - _globals['_LISTRESPONSE']._serialized_start=2028 - _globals['_LISTRESPONSE']._serialized_end=2287 - _globals['_QUERYVECTOR']._serialized_start=2290 - _globals['_QUERYVECTOR']._serialized_end=2627 - _globals['_QUERYREQUEST']._serialized_start=2630 - _globals['_QUERYREQUEST']._serialized_end=3137 - _globals['_SINGLEQUERYRESULTS']._serialized_start=3139 - _globals['_SINGLEQUERYRESULTS']._serialized_end=3236 - _globals['_QUERYRESPONSE']._serialized_start=3239 - _globals['_QUERYRESPONSE']._serialized_end=3409 - _globals['_USAGE']._serialized_start=3411 - _globals['_USAGE']._serialized_end=3466 - _globals['_UPDATEREQUEST']._serialized_start=3469 - _globals['_UPDATEREQUEST']._serialized_end=3776 - _globals['_UPDATERESPONSE']._serialized_start=3778 - _globals['_UPDATERESPONSE']._serialized_end=3794 - _globals['_DESCRIBEINDEXSTATSREQUEST']._serialized_start=3796 - _globals['_DESCRIBEINDEXSTATSREQUEST']._serialized_end=3864 - _globals['_NAMESPACESUMMARY']._serialized_start=3866 - _globals['_NAMESPACESUMMARY']._serialized_end=3918 - _globals['_DESCRIBEINDEXSTATSRESPONSE']._serialized_start=3921 - _globals['_DESCRIBEINDEXSTATSRESPONSE']._serialized_end=4331 - _globals['_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY']._serialized_start=4120 - _globals['_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY']._serialized_end=4188 - _globals['_VECTORSERVICE']._serialized_start=4334 - _globals['_VECTORSERVICE']._serialized_end=5123 + _globals["DESCRIPTOR"]._options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b'\n\021io.pinecone.protoP\001Z+github.com/pinecone-io/go-pinecone/pinecone\222A\311\002\022K\n\014Pinecone API";\n\017Pinecone.io Ops\022\023https://pinecone.io\032\023support@pinecone.io\032\014{index_host}*\001\0022\020application/json:\020application/jsonZx\nv\n\nApiKeyAuth\022h\010\002\022YAn API Key is required to call Pinecone APIs. 
Get yours at https://www.pinecone.io/start/\032\007Api-Key \002b\020\n\016\n\nApiKeyAuth\022\000r9\n\031More Pinecone.io API docs\022\034https://www.pinecone.io/docs' + ) + _globals["_SPARSEVALUES"].fields_by_name["indices"]._options = None + _globals["_SPARSEVALUES"].fields_by_name[ + "indices" + ]._serialized_options = b"\222A\036J\026[1, 312, 822, 14, 980]x\350\007\200\001\001\342A\001\002" + _globals["_SPARSEVALUES"].fields_by_name["values"]._options = None + _globals["_SPARSEVALUES"].fields_by_name[ + "values" + ]._serialized_options = b"\222A!J\031[0.1, 0.2, 0.3, 0.4, 0.5]x\350\007\200\001\001\342A\001\002" + _globals["_VECTOR"].fields_by_name["id"]._options = None + _globals["_VECTOR"].fields_by_name[ + "id" + ]._serialized_options = b'\222A\032J\022"example-vector-1"x\200\004\200\001\001\342A\001\002' + _globals["_VECTOR"].fields_by_name["values"]._options = None + _globals["_VECTOR"].fields_by_name[ + "values" + ]._serialized_options = b"\222A1J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\240\234\001\200\001\001\342A\001\002" + _globals["_VECTOR"].fields_by_name["metadata"]._options = None + _globals["_VECTOR"].fields_by_name[ + "metadata" + ]._serialized_options = b'\222A(J&{"genre": "documentary", "year": 2019}' + _globals["_SCOREDVECTOR"].fields_by_name["id"]._options = None + _globals["_SCOREDVECTOR"].fields_by_name[ + "id" + ]._serialized_options = b'\222A\032J\022"example-vector-1"x\200\004\200\001\001\342A\001\002' + _globals["_SCOREDVECTOR"].fields_by_name["score"]._options = None + _globals["_SCOREDVECTOR"].fields_by_name["score"]._serialized_options = b"\222A\006J\0040.08" + _globals["_SCOREDVECTOR"].fields_by_name["values"]._options = None + _globals["_SCOREDVECTOR"].fields_by_name[ + "values" + ]._serialized_options = b"\222A*J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]" + _globals["_SCOREDVECTOR"].fields_by_name["metadata"]._options = None + _globals["_SCOREDVECTOR"].fields_by_name[ + "metadata" + ]._serialized_options = b'\222A(J&{"genre": 
"documentary", "year": 2019}' + _globals["_UPSERTREQUEST"].fields_by_name["vectors"]._options = None + _globals["_UPSERTREQUEST"].fields_by_name[ + "vectors" + ]._serialized_options = b"\222A\006x\350\007\200\001\001\342A\001\002" + _globals["_UPSERTREQUEST"].fields_by_name["namespace"]._options = None + _globals["_UPSERTREQUEST"].fields_by_name["namespace"]._serialized_options = b'\222A\025J\023"example-namespace"' + _globals["_UPSERTRESPONSE"].fields_by_name["upserted_count"]._options = None + _globals["_UPSERTRESPONSE"].fields_by_name["upserted_count"]._serialized_options = b"\222A\004J\00210" + _globals["_DELETEREQUEST"].fields_by_name["ids"]._options = None + _globals["_DELETEREQUEST"].fields_by_name[ + "ids" + ]._serialized_options = b'\222A\030J\020["id-0", "id-1"]x\350\007\200\001\001' + _globals["_DELETEREQUEST"].fields_by_name["delete_all"]._options = None + _globals["_DELETEREQUEST"].fields_by_name["delete_all"]._serialized_options = b"\222A\016:\005falseJ\005false" + _globals["_DELETEREQUEST"].fields_by_name["namespace"]._options = None + _globals["_DELETEREQUEST"].fields_by_name["namespace"]._serialized_options = b'\222A\025J\023"example-namespace"' + _globals["_FETCHREQUEST"].fields_by_name["ids"]._options = None + _globals["_FETCHREQUEST"].fields_by_name[ + "ids" + ]._serialized_options = b'\222A\030J\020["id-0", "id-1"]x\350\007\200\001\001\342A\001\002' + _globals["_FETCHREQUEST"].fields_by_name["namespace"]._options = None + _globals["_FETCHREQUEST"].fields_by_name["namespace"]._serialized_options = b'\222A\025J\023"example-namespace"' + _globals["_FETCHRESPONSE_VECTORSENTRY"]._options = None + _globals["_FETCHRESPONSE_VECTORSENTRY"]._serialized_options = b"8\001" + _globals["_FETCHRESPONSE"].fields_by_name["namespace"]._options = None + _globals["_FETCHRESPONSE"].fields_by_name["namespace"]._serialized_options = b'\222A\025J\023"example-namespace"' + _globals["_FETCHRESPONSE"].fields_by_name["usage"]._options = None + 
_globals["_FETCHRESPONSE"].fields_by_name["usage"]._serialized_options = b'\222A\023J\021{"read_units": 5}' + _globals["_LISTREQUEST"].fields_by_name["prefix"]._options = None + _globals["_LISTREQUEST"].fields_by_name[ + "prefix" + ]._serialized_options = b'\222A\024J\014"document1#"x\350\007\200\001\001' + _globals["_LISTREQUEST"].fields_by_name["limit"]._options = None + _globals["_LISTREQUEST"].fields_by_name["limit"]._serialized_options = b"\222A\t:\003100J\00212" + _globals["_LISTREQUEST"].fields_by_name["pagination_token"]._options = None + _globals["_LISTREQUEST"].fields_by_name[ + "pagination_token" + ]._serialized_options = b'\222A J\036"Tm90aGluZyB0byBzZWUgaGVyZQo="' + _globals["_LISTREQUEST"].fields_by_name["namespace"]._options = None + _globals["_LISTREQUEST"].fields_by_name["namespace"]._serialized_options = b'\222A\025J\023"example-namespace"' + _globals["_PAGINATION"].fields_by_name["next"]._options = None + _globals["_PAGINATION"].fields_by_name["next"]._serialized_options = b'\222A J\036"Tm90aGluZyB0byBzZWUgaGVyZQo="' + _globals["_LISTITEM"].fields_by_name["id"]._options = None + _globals["_LISTITEM"].fields_by_name["id"]._serialized_options = b'\222A\021J\017"document1#abb"' + _globals["_LISTRESPONSE"].fields_by_name["vectors"]._options = None + _globals["_LISTRESPONSE"].fields_by_name[ + "vectors" + ]._serialized_options = b'\222A4J2[{"id": "document1#abb"}, {"id": "document1#abc"}]' + _globals["_LISTRESPONSE"].fields_by_name["namespace"]._options = None + _globals["_LISTRESPONSE"].fields_by_name["namespace"]._serialized_options = b'\222A\025J\023"example-namespace"' + _globals["_LISTRESPONSE"].fields_by_name["usage"]._options = None + _globals["_LISTRESPONSE"].fields_by_name["usage"]._serialized_options = b'\222A\023J\021{"read_units": 1}' + _globals["_QUERYVECTOR"].fields_by_name["values"]._options = None + _globals["_QUERYVECTOR"].fields_by_name[ + "values" + ]._serialized_options = b"\222A1J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 
0.8]x\240\234\001\200\001\001\342A\001\002" + _globals["_QUERYVECTOR"].fields_by_name["top_k"]._options = None + _globals["_QUERYVECTOR"].fields_by_name[ + "top_k" + ]._serialized_options = b"\222A\026J\00210Y\000\000\000\000\000\210\303@i\000\000\000\000\000\000\360?" + _globals["_QUERYVECTOR"].fields_by_name["namespace"]._options = None + _globals["_QUERYVECTOR"].fields_by_name["namespace"]._serialized_options = b'\222A\025J\023"example-namespace"' + _globals["_QUERYVECTOR"].fields_by_name["filter"]._options = None + _globals["_QUERYVECTOR"].fields_by_name[ + "filter" + ]._serialized_options = b'\222AOJM{"genre": {"$in": ["comedy", "documentary", "drama"]}, "year": {"$eq": 2019}}' + _globals["_QUERYREQUEST"].fields_by_name["namespace"]._options = None + _globals["_QUERYREQUEST"].fields_by_name["namespace"]._serialized_options = b'\222A\025J\023"example-namespace"' + _globals["_QUERYREQUEST"].fields_by_name["top_k"]._options = None + _globals["_QUERYREQUEST"].fields_by_name[ + "top_k" + ]._serialized_options = b"\222A\026J\00210Y\000\000\000\000\000\210\303@i\000\000\000\000\000\000\360?\342A\001\002" + _globals["_QUERYREQUEST"].fields_by_name["filter"]._options = None + _globals["_QUERYREQUEST"].fields_by_name[ + "filter" + ]._serialized_options = b'\222AOJM{"genre": {"$in": ["comedy", "documentary", "drama"]}, "year": {"$eq": 2019}}' + _globals["_QUERYREQUEST"].fields_by_name["include_values"]._options = None + _globals["_QUERYREQUEST"].fields_by_name["include_values"]._serialized_options = b"\222A\r:\005falseJ\004true" + _globals["_QUERYREQUEST"].fields_by_name["include_metadata"]._options = None + _globals["_QUERYREQUEST"].fields_by_name["include_metadata"]._serialized_options = b"\222A\r:\005falseJ\004true" + _globals["_QUERYREQUEST"].fields_by_name["queries"]._options = None + _globals["_QUERYREQUEST"].fields_by_name["queries"]._serialized_options = b"\030\001\222A\005x\n\200\001\001" + _globals["_QUERYREQUEST"].fields_by_name["vector"]._options = None + 
_globals["_QUERYREQUEST"].fields_by_name[ + "vector" + ]._serialized_options = b"\222A1J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\240\234\001\200\001\001" + _globals["_QUERYREQUEST"].fields_by_name["id"]._options = None + _globals["_QUERYREQUEST"].fields_by_name["id"]._serialized_options = b'\222A\027J\022"example-vector-1"x\200\004' + _globals["_SINGLEQUERYRESULTS"].fields_by_name["namespace"]._options = None + _globals["_SINGLEQUERYRESULTS"].fields_by_name[ + "namespace" + ]._serialized_options = b'\222A\025J\023"example-namespace"' + _globals["_QUERYRESPONSE"].fields_by_name["results"]._options = None + _globals["_QUERYRESPONSE"].fields_by_name["results"]._serialized_options = b"\030\001" + _globals["_QUERYRESPONSE"].fields_by_name["usage"]._options = None + _globals["_QUERYRESPONSE"].fields_by_name["usage"]._serialized_options = b'\222A\023J\021{"read_units": 5}' + _globals["_USAGE"].fields_by_name["read_units"]._options = None + _globals["_USAGE"].fields_by_name["read_units"]._serialized_options = b"\222A\003J\0015" + _globals["_UPDATEREQUEST"].fields_by_name["id"]._options = None + _globals["_UPDATEREQUEST"].fields_by_name[ + "id" + ]._serialized_options = b'\222A\032J\022"example-vector-1"x\200\004\200\001\001\342A\001\002' + _globals["_UPDATEREQUEST"].fields_by_name["values"]._options = None + _globals["_UPDATEREQUEST"].fields_by_name[ + "values" + ]._serialized_options = b"\222A1J([0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]x\240\234\001\200\001\001" + _globals["_UPDATEREQUEST"].fields_by_name["set_metadata"]._options = None + _globals["_UPDATEREQUEST"].fields_by_name[ + "set_metadata" + ]._serialized_options = b'\222A(J&{"genre": "documentary", "year": 2019}' + _globals["_UPDATEREQUEST"].fields_by_name["namespace"]._options = None + _globals["_UPDATEREQUEST"].fields_by_name["namespace"]._serialized_options = b'\222A\025J\023"example-namespace"' + _globals["_NAMESPACESUMMARY"].fields_by_name["vector_count"]._options = None + 
_globals["_NAMESPACESUMMARY"].fields_by_name["vector_count"]._serialized_options = b"\222A\007J\00550000" + _globals["_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY"]._options = None + _globals["_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY"]._serialized_options = b"8\001" + _globals["_DESCRIBEINDEXSTATSRESPONSE"].fields_by_name["dimension"]._options = None + _globals["_DESCRIBEINDEXSTATSRESPONSE"].fields_by_name["dimension"]._serialized_options = b"\222A\006J\0041024" + _globals["_DESCRIBEINDEXSTATSRESPONSE"].fields_by_name["index_fullness"]._options = None + _globals["_DESCRIBEINDEXSTATSRESPONSE"].fields_by_name["index_fullness"]._serialized_options = b"\222A\005J\0030.4" + _globals["_DESCRIBEINDEXSTATSRESPONSE"].fields_by_name["total_vector_count"]._options = None + _globals["_DESCRIBEINDEXSTATSRESPONSE"].fields_by_name[ + "total_vector_count" + ]._serialized_options = b"\222A\007J\00580000" + _globals["_DESCRIBEINDEXSTATSRESPONSE"]._options = None + _globals["_DESCRIBEINDEXSTATSRESPONSE"]._serialized_options = ( + b'\222A\210\0012\205\001{"namespaces": {"": {"vectorCount": 50000}, "example-namespace-2": {"vectorCount": 30000}}, "dimension": 1024, "index_fullness": 0.4}' + ) + _globals["_VECTORSERVICE"].methods_by_name["Upsert"]._options = None + _globals["_VECTORSERVICE"].methods_by_name[ + "Upsert" + ]._serialized_options = ( + b'\222A\033\n\021Vector Operations*\006upsert\202\323\344\223\002\024"\017/vectors/upsert:\001*' + ) + _globals["_VECTORSERVICE"].methods_by_name["Delete"]._options = None + _globals["_VECTORSERVICE"].methods_by_name[ + "Delete" + ]._serialized_options = b"\222A\033\n\021Vector Operations*\006delete\202\323\344\223\002'\"\017/vectors/delete:\001*Z\021*\017/vectors/delete" + _globals["_VECTORSERVICE"].methods_by_name["Fetch"]._options = None + _globals["_VECTORSERVICE"].methods_by_name[ + "Fetch" + ]._serialized_options = b"\222A\032\n\021Vector Operations*\005fetch\202\323\344\223\002\020\022\016/vectors/fetch" + 
_globals["_VECTORSERVICE"].methods_by_name["List"]._options = None + _globals["_VECTORSERVICE"].methods_by_name[ + "List" + ]._serialized_options = b"\222A\031\n\021Vector Operations*\004list\202\323\344\223\002\017\022\r/vectors/list" + _globals["_VECTORSERVICE"].methods_by_name["Query"]._options = None + _globals["_VECTORSERVICE"].methods_by_name[ + "Query" + ]._serialized_options = b'\222A\032\n\021Vector Operations*\005query\202\323\344\223\002\013"\006/query:\001*' + _globals["_VECTORSERVICE"].methods_by_name["Update"]._options = None + _globals["_VECTORSERVICE"].methods_by_name[ + "Update" + ]._serialized_options = ( + b'\222A\033\n\021Vector Operations*\006update\202\323\344\223\002\024"\017/vectors/update:\001*' + ) + _globals["_VECTORSERVICE"].methods_by_name["DescribeIndexStats"]._options = None + _globals["_VECTORSERVICE"].methods_by_name[ + "DescribeIndexStats" + ]._serialized_options = b'\222A)\n\021Vector Operations*\024describe_index_stats\202\323\344\223\0023"\025/describe_index_stats:\001*Z\027\022\025/describe_index_stats' + _globals["_SPARSEVALUES"]._serialized_start = 166 + _globals["_SPARSEVALUES"]._serialized_end = 294 + _globals["_VECTOR"]._serialized_start = 297 + _globals["_VECTOR"]._serialized_end = 552 + _globals["_SCOREDVECTOR"]._serialized_start = 555 + _globals["_SCOREDVECTOR"]._serialized_end = 831 + _globals["_REQUESTUNION"]._serialized_start = 834 + _globals["_REQUESTUNION"]._serialized_end = 971 + _globals["_UPSERTREQUEST"]._serialized_start = 973 + _globals["_UPSERTREQUEST"]._serialized_end = 1074 + _globals["_UPSERTRESPONSE"]._serialized_start = 1076 + _globals["_UPSERTRESPONSE"]._serialized_end = 1125 + _globals["_DELETEREQUEST"]._serialized_start = 1128 + _globals["_DELETEREQUEST"]._serialized_end = 1310 + _globals["_DELETERESPONSE"]._serialized_start = 1312 + _globals["_DELETERESPONSE"]._serialized_end = 1328 + _globals["_FETCHREQUEST"]._serialized_start = 1330 + _globals["_FETCHREQUEST"]._serialized_end = 1435 + 
_globals["_FETCHRESPONSE"]._serialized_start = 1438 + _globals["_FETCHRESPONSE"]._serialized_end = 1663 + _globals["_FETCHRESPONSE_VECTORSENTRY"]._serialized_start = 1598 + _globals["_FETCHRESPONSE_VECTORSENTRY"]._serialized_end = 1653 + _globals["_LISTREQUEST"]._serialized_start = 1666 + _globals["_LISTREQUEST"]._serialized_end = 1914 + _globals["_PAGINATION"]._serialized_start = 1916 + _globals["_PAGINATION"]._serialized_end = 1979 + _globals["_LISTITEM"]._serialized_start = 1981 + _globals["_LISTITEM"]._serialized_end = 2025 + _globals["_LISTRESPONSE"]._serialized_start = 2028 + _globals["_LISTRESPONSE"]._serialized_end = 2287 + _globals["_QUERYVECTOR"]._serialized_start = 2290 + _globals["_QUERYVECTOR"]._serialized_end = 2627 + _globals["_QUERYREQUEST"]._serialized_start = 2630 + _globals["_QUERYREQUEST"]._serialized_end = 3137 + _globals["_SINGLEQUERYRESULTS"]._serialized_start = 3139 + _globals["_SINGLEQUERYRESULTS"]._serialized_end = 3236 + _globals["_QUERYRESPONSE"]._serialized_start = 3239 + _globals["_QUERYRESPONSE"]._serialized_end = 3409 + _globals["_USAGE"]._serialized_start = 3411 + _globals["_USAGE"]._serialized_end = 3466 + _globals["_UPDATEREQUEST"]._serialized_start = 3469 + _globals["_UPDATEREQUEST"]._serialized_end = 3776 + _globals["_UPDATERESPONSE"]._serialized_start = 3778 + _globals["_UPDATERESPONSE"]._serialized_end = 3794 + _globals["_DESCRIBEINDEXSTATSREQUEST"]._serialized_start = 3796 + _globals["_DESCRIBEINDEXSTATSREQUEST"]._serialized_end = 3864 + _globals["_NAMESPACESUMMARY"]._serialized_start = 3866 + _globals["_NAMESPACESUMMARY"]._serialized_end = 3918 + _globals["_DESCRIBEINDEXSTATSRESPONSE"]._serialized_start = 3921 + _globals["_DESCRIBEINDEXSTATSRESPONSE"]._serialized_end = 4331 + _globals["_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY"]._serialized_start = 4120 + _globals["_DESCRIBEINDEXSTATSRESPONSE_NAMESPACESENTRY"]._serialized_end = 4188 + _globals["_VECTORSERVICE"]._serialized_start = 4334 + 
_globals["_VECTORSERVICE"]._serialized_end = 5123 # @@protoc_insertion_point(module_scope) diff --git a/pinecone/core/grpc/protos/vector_service_pb2.pyi b/pinecone/core/grpc/protos/vector_service_pb2.pyi index 2b09c253..ca6e70d3 100644 --- a/pinecone/core/grpc/protos/vector_service_pb2.pyi +++ b/pinecone/core/grpc/protos/vector_service_pb2.pyi @@ -64,8 +64,15 @@ class Vector(google.protobuf.message.Message): sparse_values: global___SparseValues | None = ..., metadata: google.protobuf.struct_pb2.Struct | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["metadata", b"metadata", "sparse_values", b"sparse_values"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "metadata", b"metadata", "sparse_values", b"sparse_values", "values", b"values"]) -> None: ... + def HasField( + self, field_name: typing.Literal["metadata", b"metadata", "sparse_values", b"sparse_values"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "id", b"id", "metadata", b"metadata", "sparse_values", b"sparse_values", "values", b"values" + ], + ) -> None: ... global___Vector = Vector @@ -103,8 +110,24 @@ class ScoredVector(google.protobuf.message.Message): sparse_values: global___SparseValues | None = ..., metadata: google.protobuf.struct_pb2.Struct | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["metadata", b"metadata", "sparse_values", b"sparse_values"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "metadata", b"metadata", "score", b"score", "sparse_values", b"sparse_values", "values", b"values"]) -> None: ... + def HasField( + self, field_name: typing.Literal["metadata", b"metadata", "sparse_values", b"sparse_values"] + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "id", + b"id", + "metadata", + b"metadata", + "score", + b"score", + "sparse_values", + b"sparse_values", + "values", + b"values", + ], + ) -> None: ... global___ScoredVector = ScoredVector @@ -132,9 +155,21 @@ class RequestUnion(google.protobuf.message.Message): delete: global___DeleteRequest | None = ..., update: global___UpdateRequest | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["RequestUnionInner", b"RequestUnionInner", "delete", b"delete", "update", b"update", "upsert", b"upsert"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["RequestUnionInner", b"RequestUnionInner", "delete", b"delete", "update", b"update", "upsert", b"upsert"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["RequestUnionInner", b"RequestUnionInner"]) -> typing.Literal["upsert", "delete", "update"] | None: ... + def HasField( + self, + field_name: typing.Literal[ + "RequestUnionInner", b"RequestUnionInner", "delete", b"delete", "update", b"update", "upsert", b"upsert" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "RequestUnionInner", b"RequestUnionInner", "delete", b"delete", "update", b"update", "upsert", b"upsert" + ], + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["RequestUnionInner", b"RequestUnionInner"] + ) -> typing.Literal["upsert", "delete", "update"] | None: ... global___RequestUnion = RequestUnion @@ -214,7 +249,12 @@ class DeleteRequest(google.protobuf.message.Message): filter: google.protobuf.struct_pb2.Struct | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["filter", b"filter"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["delete_all", b"delete_all", "filter", b"filter", "ids", b"ids", "namespace", b"namespace"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "delete_all", b"delete_all", "filter", b"filter", "ids", b"ids", "namespace", b"namespace" + ], + ) -> None: ... global___DeleteRequest = DeleteRequest @@ -288,7 +328,7 @@ class FetchResponse(google.protobuf.message.Message): @property def usage(self) -> global___Usage: - """ The usage for this operation.""" + """The usage for this operation.""" def __init__( self, @@ -298,7 +338,12 @@ class FetchResponse(google.protobuf.message.Message): usage: global___Usage | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["_usage", b"_usage", "usage", b"usage"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_usage", b"_usage", "namespace", b"namespace", "usage", b"usage", "vectors", b"vectors"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "_usage", b"_usage", "namespace", b"namespace", "usage", b"usage", "vectors", b"vectors" + ], + ) -> None: ... def WhichOneof(self, oneof_group: typing.Literal["_usage", b"_usage"]) -> typing.Literal["usage"] | None: ... global___FetchResponse = FetchResponse @@ -328,12 +373,48 @@ class ListRequest(google.protobuf.message.Message): pagination_token: builtins.str | None = ..., namespace: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_limit", b"_limit", "_pagination_token", b"_pagination_token", "_prefix", b"_prefix", "limit", b"limit", "pagination_token", b"pagination_token", "prefix", b"prefix"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_limit", b"_limit", "_pagination_token", b"_pagination_token", "_prefix", b"_prefix", "limit", b"limit", "namespace", b"namespace", "pagination_token", b"pagination_token", "prefix", b"prefix"]) -> None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "_limit", + b"_limit", + "_pagination_token", + b"_pagination_token", + "_prefix", + b"_prefix", + "limit", + b"limit", + "pagination_token", + b"pagination_token", + "prefix", + b"prefix", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "_limit", + b"_limit", + "_pagination_token", + b"_pagination_token", + "_prefix", + b"_prefix", + "limit", + b"limit", + "namespace", + b"namespace", + "pagination_token", + b"pagination_token", + "prefix", + b"prefix", + ], + ) -> None: ... @typing.overload def WhichOneof(self, oneof_group: typing.Literal["_limit", b"_limit"]) -> typing.Literal["limit"] | None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_pagination_token", b"_pagination_token"]) -> typing.Literal["pagination_token"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["_pagination_token", b"_pagination_token"] + ) -> typing.Literal["pagination_token"] | None: ... @typing.overload def WhichOneof(self, oneof_group: typing.Literal["_prefix", b"_prefix"]) -> typing.Literal["prefix"] | None: ... @@ -391,7 +472,7 @@ class ListResponse(google.protobuf.message.Message): @property def usage(self) -> global___Usage: - """ The usage for this operation.""" + """The usage for this operation.""" def __init__( self, @@ -401,10 +482,33 @@ class ListResponse(google.protobuf.message.Message): namespace: builtins.str = ..., usage: global___Usage | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_pagination", b"_pagination", "_usage", b"_usage", "pagination", b"pagination", "usage", b"usage"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_pagination", b"_pagination", "_usage", b"_usage", "namespace", b"namespace", "pagination", b"pagination", "usage", b"usage", "vectors", b"vectors"]) -> None: ... 
+ def HasField( + self, + field_name: typing.Literal[ + "_pagination", b"_pagination", "_usage", b"_usage", "pagination", b"pagination", "usage", b"usage" + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "_pagination", + b"_pagination", + "_usage", + b"_usage", + "namespace", + b"namespace", + "pagination", + b"pagination", + "usage", + b"usage", + "vectors", + b"vectors", + ], + ) -> None: ... @typing.overload - def WhichOneof(self, oneof_group: typing.Literal["_pagination", b"_pagination"]) -> typing.Literal["pagination"] | None: ... + def WhichOneof( + self, oneof_group: typing.Literal["_pagination", b"_pagination"] + ) -> typing.Literal["pagination"] | None: ... @typing.overload def WhichOneof(self, oneof_group: typing.Literal["_usage", b"_usage"]) -> typing.Literal["usage"] | None: ... @@ -446,8 +550,24 @@ class QueryVector(google.protobuf.message.Message): namespace: builtins.str = ..., filter: google.protobuf.struct_pb2.Struct | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["filter", b"filter", "sparse_values", b"sparse_values"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["filter", b"filter", "namespace", b"namespace", "sparse_values", b"sparse_values", "top_k", b"top_k", "values", b"values"]) -> None: ... + def HasField( + self, field_name: typing.Literal["filter", b"filter", "sparse_values", b"sparse_values"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "filter", + b"filter", + "namespace", + b"namespace", + "sparse_values", + b"sparse_values", + "top_k", + b"top_k", + "values", + b"values", + ], + ) -> None: ... global___QueryVector = QueryVector @@ -505,8 +625,32 @@ class QueryRequest(google.protobuf.message.Message): sparse_vector: global___SparseValues | None = ..., id: builtins.str = ..., ) -> None: ... 
- def HasField(self, field_name: typing.Literal["filter", b"filter", "sparse_vector", b"sparse_vector"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["filter", b"filter", "id", b"id", "include_metadata", b"include_metadata", "include_values", b"include_values", "namespace", b"namespace", "queries", b"queries", "sparse_vector", b"sparse_vector", "top_k", b"top_k", "vector", b"vector"]) -> None: ... + def HasField( + self, field_name: typing.Literal["filter", b"filter", "sparse_vector", b"sparse_vector"] + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "filter", + b"filter", + "id", + b"id", + "include_metadata", + b"include_metadata", + "include_values", + b"include_values", + "namespace", + b"namespace", + "queries", + b"queries", + "sparse_vector", + b"sparse_vector", + "top_k", + b"top_k", + "vector", + b"vector", + ], + ) -> None: ... global___QueryRequest = QueryRequest @@ -547,7 +691,9 @@ class QueryResponse(google.protobuf.message.Message): namespace: builtins.str """The namespace for the vectors.""" @property - def results(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SingleQueryResults]: + def results( + self, + ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SingleQueryResults]: """DEPRECATED. The results of each query. The order is the same as `QueryRequest.queries`.""" @property @@ -567,7 +713,21 @@ class QueryResponse(google.protobuf.message.Message): usage: global___Usage | None = ..., ) -> None: ... def HasField(self, field_name: typing.Literal["_usage", b"_usage", "usage", b"usage"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_usage", b"_usage", "matches", b"matches", "namespace", b"namespace", "results", b"results", "usage", b"usage"]) -> None: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "_usage", + b"_usage", + "matches", + b"matches", + "namespace", + b"namespace", + "results", + b"results", + "usage", + b"usage", + ], + ) -> None: ... def WhichOneof(self, oneof_group: typing.Literal["_usage", b"_usage"]) -> typing.Literal["usage"] | None: ... global___QueryResponse = QueryResponse @@ -584,9 +744,15 @@ class Usage(google.protobuf.message.Message): *, read_units: builtins.int | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["_read_units", b"_read_units", "read_units", b"read_units"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["_read_units", b"_read_units", "read_units", b"read_units"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["_read_units", b"_read_units"]) -> typing.Literal["read_units"] | None: ... + def HasField( + self, field_name: typing.Literal["_read_units", b"_read_units", "read_units", b"read_units"] + ) -> builtins.bool: ... + def ClearField( + self, field_name: typing.Literal["_read_units", b"_read_units", "read_units", b"read_units"] + ) -> None: ... + def WhichOneof( + self, oneof_group: typing.Literal["_read_units", b"_read_units"] + ) -> typing.Literal["read_units"] | None: ... global___Usage = Usage @@ -624,8 +790,24 @@ class UpdateRequest(google.protobuf.message.Message): set_metadata: google.protobuf.struct_pb2.Struct | None = ..., namespace: builtins.str = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["set_metadata", b"set_metadata", "sparse_values", b"sparse_values"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["id", b"id", "namespace", b"namespace", "set_metadata", b"set_metadata", "sparse_values", b"sparse_values", "values", b"values"]) -> None: ... + def HasField( + self, field_name: typing.Literal["set_metadata", b"set_metadata", "sparse_values", b"sparse_values"] + ) -> builtins.bool: ... 
+ def ClearField( + self, + field_name: typing.Literal[ + "id", + b"id", + "namespace", + b"namespace", + "set_metadata", + b"set_metadata", + "sparse_values", + b"sparse_values", + "values", + b"values", + ], + ) -> None: ... global___UpdateRequest = UpdateRequest @@ -734,6 +916,18 @@ class DescribeIndexStatsResponse(google.protobuf.message.Message): index_fullness: builtins.float = ..., total_vector_count: builtins.int = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["dimension", b"dimension", "index_fullness", b"index_fullness", "namespaces", b"namespaces", "total_vector_count", b"total_vector_count"]) -> None: ... + def ClearField( + self, + field_name: typing.Literal[ + "dimension", + b"dimension", + "index_fullness", + b"index_fullness", + "namespaces", + b"namespaces", + "total_vector_count", + b"total_vector_count", + ], + ) -> None: ... global___DescribeIndexStatsResponse = DescribeIndexStatsResponse diff --git a/pinecone/core/grpc/protos/vector_service_pb2_grpc.py b/pinecone/core/grpc/protos/vector_service_pb2_grpc.py index f3126923..945dd7d9 100644 --- a/pinecone/core/grpc/protos/vector_service_pb2_grpc.py +++ b/pinecone/core/grpc/protos/vector_service_pb2_grpc.py @@ -4,6 +4,7 @@ import pinecone.core.grpc.protos.vector_service_pb2 as vector__service__pb2 + class VectorServiceStub(object): """The `VectorService` interface is exposed by Pinecone's vector index services. This service could also be called a `gRPC` service or a `REST`-like api. @@ -16,40 +17,40 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.Upsert = channel.unary_unary( - '/VectorService/Upsert', - request_serializer=vector__service__pb2.UpsertRequest.SerializeToString, - response_deserializer=vector__service__pb2.UpsertResponse.FromString, - ) + "/VectorService/Upsert", + request_serializer=vector__service__pb2.UpsertRequest.SerializeToString, + response_deserializer=vector__service__pb2.UpsertResponse.FromString, + ) self.Delete = channel.unary_unary( - '/VectorService/Delete', - request_serializer=vector__service__pb2.DeleteRequest.SerializeToString, - response_deserializer=vector__service__pb2.DeleteResponse.FromString, - ) + "/VectorService/Delete", + request_serializer=vector__service__pb2.DeleteRequest.SerializeToString, + response_deserializer=vector__service__pb2.DeleteResponse.FromString, + ) self.Fetch = channel.unary_unary( - '/VectorService/Fetch', - request_serializer=vector__service__pb2.FetchRequest.SerializeToString, - response_deserializer=vector__service__pb2.FetchResponse.FromString, - ) + "/VectorService/Fetch", + request_serializer=vector__service__pb2.FetchRequest.SerializeToString, + response_deserializer=vector__service__pb2.FetchResponse.FromString, + ) self.List = channel.unary_unary( - '/VectorService/List', - request_serializer=vector__service__pb2.ListRequest.SerializeToString, - response_deserializer=vector__service__pb2.ListResponse.FromString, - ) + "/VectorService/List", + request_serializer=vector__service__pb2.ListRequest.SerializeToString, + response_deserializer=vector__service__pb2.ListResponse.FromString, + ) self.Query = channel.unary_unary( - '/VectorService/Query', - request_serializer=vector__service__pb2.QueryRequest.SerializeToString, - response_deserializer=vector__service__pb2.QueryResponse.FromString, - ) + "/VectorService/Query", + request_serializer=vector__service__pb2.QueryRequest.SerializeToString, + response_deserializer=vector__service__pb2.QueryResponse.FromString, + ) self.Update = channel.unary_unary( - '/VectorService/Update', - 
request_serializer=vector__service__pb2.UpdateRequest.SerializeToString, - response_deserializer=vector__service__pb2.UpdateResponse.FromString, - ) + "/VectorService/Update", + request_serializer=vector__service__pb2.UpdateRequest.SerializeToString, + response_deserializer=vector__service__pb2.UpdateResponse.FromString, + ) self.DescribeIndexStats = channel.unary_unary( - '/VectorService/DescribeIndexStats', - request_serializer=vector__service__pb2.DescribeIndexStatsRequest.SerializeToString, - response_deserializer=vector__service__pb2.DescribeIndexStatsResponse.FromString, - ) + "/VectorService/DescribeIndexStats", + request_serializer=vector__service__pb2.DescribeIndexStatsRequest.SerializeToString, + response_deserializer=vector__service__pb2.DescribeIndexStatsResponse.FromString, + ) class VectorServiceServicer(object): @@ -65,8 +66,8 @@ def Upsert(self, request, context): For guidance and examples, see [Upsert data](https://docs.pinecone.io/docs/upsert-data). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def Delete(self, request, context): """Delete vectors @@ -76,8 +77,8 @@ def Delete(self, request, context): For guidance and examples, see [Delete data](https://docs.pinecone.io/docs/delete-data). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def Fetch(self, request, context): """Fetch vectors @@ -87,8 +88,8 @@ def Fetch(self, request, context): For guidance and examples, see [Fetch data](https://docs.pinecone.io/docs/fetch-data). 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def List(self, request, context): """List vector IDs @@ -102,8 +103,8 @@ def List(self, request, context): **Note:** `list` is supported only for serverless indexes. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def Query(self, request, context): """Query vectors @@ -113,8 +114,8 @@ def Query(self, request, context): For guidance and examples, see [Query data](https://docs.pinecone.io/docs/query-data). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def Update(self, request, context): """Update a vector @@ -124,8 +125,8 @@ def Update(self, request, context): For guidance and examples, see [Update data](https://docs.pinecone.io/docs/update-data). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def DescribeIndexStats(self, request, context): """Get index stats @@ -137,174 +138,257 @@ def DescribeIndexStats(self, request, context): For pod-based indexes, the index fullness result may be inaccurate during pod resizing; to get the status of a pod resizing process, use [`describe_index`](https://www.pinecone.io/docs/api/operation/describe_index/). 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_VectorServiceServicer_to_server(servicer, server): rpc_method_handlers = { - 'Upsert': grpc.unary_unary_rpc_method_handler( - servicer.Upsert, - request_deserializer=vector__service__pb2.UpsertRequest.FromString, - response_serializer=vector__service__pb2.UpsertResponse.SerializeToString, - ), - 'Delete': grpc.unary_unary_rpc_method_handler( - servicer.Delete, - request_deserializer=vector__service__pb2.DeleteRequest.FromString, - response_serializer=vector__service__pb2.DeleteResponse.SerializeToString, - ), - 'Fetch': grpc.unary_unary_rpc_method_handler( - servicer.Fetch, - request_deserializer=vector__service__pb2.FetchRequest.FromString, - response_serializer=vector__service__pb2.FetchResponse.SerializeToString, - ), - 'List': grpc.unary_unary_rpc_method_handler( - servicer.List, - request_deserializer=vector__service__pb2.ListRequest.FromString, - response_serializer=vector__service__pb2.ListResponse.SerializeToString, - ), - 'Query': grpc.unary_unary_rpc_method_handler( - servicer.Query, - request_deserializer=vector__service__pb2.QueryRequest.FromString, - response_serializer=vector__service__pb2.QueryResponse.SerializeToString, - ), - 'Update': grpc.unary_unary_rpc_method_handler( - servicer.Update, - request_deserializer=vector__service__pb2.UpdateRequest.FromString, - response_serializer=vector__service__pb2.UpdateResponse.SerializeToString, - ), - 'DescribeIndexStats': grpc.unary_unary_rpc_method_handler( - servicer.DescribeIndexStats, - request_deserializer=vector__service__pb2.DescribeIndexStatsRequest.FromString, - response_serializer=vector__service__pb2.DescribeIndexStatsResponse.SerializeToString, - ), + "Upsert": grpc.unary_unary_rpc_method_handler( + servicer.Upsert, + 
request_deserializer=vector__service__pb2.UpsertRequest.FromString, + response_serializer=vector__service__pb2.UpsertResponse.SerializeToString, + ), + "Delete": grpc.unary_unary_rpc_method_handler( + servicer.Delete, + request_deserializer=vector__service__pb2.DeleteRequest.FromString, + response_serializer=vector__service__pb2.DeleteResponse.SerializeToString, + ), + "Fetch": grpc.unary_unary_rpc_method_handler( + servicer.Fetch, + request_deserializer=vector__service__pb2.FetchRequest.FromString, + response_serializer=vector__service__pb2.FetchResponse.SerializeToString, + ), + "List": grpc.unary_unary_rpc_method_handler( + servicer.List, + request_deserializer=vector__service__pb2.ListRequest.FromString, + response_serializer=vector__service__pb2.ListResponse.SerializeToString, + ), + "Query": grpc.unary_unary_rpc_method_handler( + servicer.Query, + request_deserializer=vector__service__pb2.QueryRequest.FromString, + response_serializer=vector__service__pb2.QueryResponse.SerializeToString, + ), + "Update": grpc.unary_unary_rpc_method_handler( + servicer.Update, + request_deserializer=vector__service__pb2.UpdateRequest.FromString, + response_serializer=vector__service__pb2.UpdateResponse.SerializeToString, + ), + "DescribeIndexStats": grpc.unary_unary_rpc_method_handler( + servicer.DescribeIndexStats, + request_deserializer=vector__service__pb2.DescribeIndexStatsRequest.FromString, + response_serializer=vector__service__pb2.DescribeIndexStatsResponse.SerializeToString, + ), } - generic_handler = grpc.method_handlers_generic_handler( - 'VectorService', rpc_method_handlers) + generic_handler = grpc.method_handlers_generic_handler("VectorService", rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) - # This class is part of an EXPERIMENTAL API. +# This class is part of an EXPERIMENTAL API. class VectorService(object): """The `VectorService` interface is exposed by Pinecone's vector index services. 
This service could also be called a `gRPC` service or a `REST`-like api. """ @staticmethod - def Upsert(request, + def Upsert( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/VectorService/Upsert', + "/VectorService/Upsert", vector__service__pb2.UpsertRequest.SerializeToString, vector__service__pb2.UpsertResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) @staticmethod - def Delete(request, + def Delete( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/VectorService/Delete', + "/VectorService/Delete", vector__service__pb2.DeleteRequest.SerializeToString, vector__service__pb2.DeleteResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) @staticmethod - def Fetch(request, + def Fetch( + request, + target, + options=(), + channel_credentials=None, + 
call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/VectorService/Fetch', + "/VectorService/Fetch", vector__service__pb2.FetchRequest.SerializeToString, vector__service__pb2.FetchResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) @staticmethod - def List(request, + def List( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/VectorService/List', + "/VectorService/List", vector__service__pb2.ListRequest.SerializeToString, vector__service__pb2.ListResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) @staticmethod - def Query(request, + def Query( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - 
channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/VectorService/Query', + "/VectorService/Query", vector__service__pb2.QueryRequest.SerializeToString, vector__service__pb2.QueryResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) @staticmethod - def Update(request, + def Update( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return grpc.experimental.unary_unary(request, target, '/VectorService/Update', + "/VectorService/Update", vector__service__pb2.UpdateRequest.SerializeToString, vector__service__pb2.UpdateResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) @staticmethod - def DescribeIndexStats(request, + def DescribeIndexStats( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - return 
grpc.experimental.unary_unary(request, target, '/VectorService/DescribeIndexStats', + "/VectorService/DescribeIndexStats", vector__service__pb2.DescribeIndexStatsRequest.SerializeToString, vector__service__pb2.DescribeIndexStatsResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/pinecone/data/__init__.py b/pinecone/data/__init__.py index 4caf309e..2c7e321d 100644 --- a/pinecone/data/__init__.py +++ b/pinecone/data/__init__.py @@ -1,10 +1,10 @@ from .index import * from .errors import ( VectorDictionaryMissingKeysError, - VectorDictionaryExcessKeysError, - VectorTupleLengthError, - SparseValuesTypeError, - SparseValuesMissingKeysError, - SparseValuesDictionaryExpectedError, - MetadataDictionaryExpectedError -) \ No newline at end of file + VectorDictionaryExcessKeysError, + VectorTupleLengthError, + SparseValuesTypeError, + SparseValuesMissingKeysError, + SparseValuesDictionaryExpectedError, + MetadataDictionaryExpectedError, +) diff --git a/pinecone/data/errors.py b/pinecone/data/errors.py index 7749a9cf..7df0dee0 100644 --- a/pinecone/data/errors.py +++ b/pinecone/data/errors.py @@ -1,36 +1,43 @@ from ..utils.constants import REQUIRED_VECTOR_FIELDS, OPTIONAL_VECTOR_FIELDS + class VectorDictionaryMissingKeysError(ValueError): def __init__(self, item): message = f"Vector dictionary is missing required fields: {list(REQUIRED_VECTOR_FIELDS - set(item.keys()))}" super().__init__(message) + class VectorDictionaryExcessKeysError(ValueError): def __init__(self, item): invalid_keys = list(set(item.keys()) - (REQUIRED_VECTOR_FIELDS | OPTIONAL_VECTOR_FIELDS)) message = f"Found excess keys in the vector dictionary: {invalid_keys}. 
The allowed keys are: {list(REQUIRED_VECTOR_FIELDS | OPTIONAL_VECTOR_FIELDS)}" super().__init__(message) + class VectorTupleLengthError(ValueError): def __init__(self, item): message = f"Found a tuple of length {len(item)} which is not supported. Vectors can be represented as tuples either the form (id, values, metadata) or (id, values). To pass sparse values please use either dicts or Vector objects as inputs." super().__init__(message) + class SparseValuesTypeError(ValueError, TypeError): def __init__(self): message = "Found unexpected data in column `sparse_values`. Expected format is `'sparse_values': {'indices': List[int], 'values': List[float]}`." super().__init__(message) + class SparseValuesMissingKeysError(ValueError): def __init__(self, sparse_values_dict): message = f"Missing required keys in data in column `sparse_values`. Expected format is `'sparse_values': {{'indices': List[int], 'values': List[float]}}`. Found keys {list(sparse_values_dict.keys())}" super().__init__(message) + class SparseValuesDictionaryExpectedError(ValueError, TypeError): def __init__(self, sparse_values_dict): message = f"Column `sparse_values` is expected to be a dictionary, found {type(sparse_values_dict)}" super().__init__(message) + class MetadataDictionaryExpectedError(ValueError, TypeError): def __init__(self, item): message = f"Column `metadata` is expected to be a dictionary, found {type(item['metadata'])}" diff --git a/pinecone/data/index.py b/pinecone/data/index.py index d613996e..44379ad3 100644 --- a/pinecone/data/index.py +++ b/pinecone/data/index.py @@ -21,7 +21,7 @@ DeleteRequest, UpdateRequest, DescribeIndexStatsRequest, - ListResponse + ListResponse, ) from pinecone.core.client.api.data_plane_api import DataPlaneApi from ..utils import setup_openapi_client @@ -58,42 +58,38 @@ "async_req", ) + def parse_query_response(response: QueryResponse): response._data_store.pop("results", None) return response -class Index(): +class Index: """ A client for interacting with 
a Pinecone index via REST API. For improved performance, use the Pinecone GRPC index client. """ def __init__( - self, - api_key: str, - host: str, - pool_threads: Optional[int] = 1, - additional_headers: Optional[Dict[str, str]] = {}, - openapi_config = None, - **kwargs - ): - self._config = ConfigBuilder.build( - api_key=api_key, - host=host, - additional_headers=additional_headers, - **kwargs - ) + self, + api_key: str, + host: str, + pool_threads: Optional[int] = 1, + additional_headers: Optional[Dict[str, str]] = {}, + openapi_config=None, + **kwargs, + ): + self._config = ConfigBuilder.build(api_key=api_key, host=host, additional_headers=additional_headers, **kwargs) openapi_config = ConfigBuilder.build_openapi_config(self._config, openapi_config) - + self._vector_api = setup_openapi_client( api_client_klass=ApiClient, api_klass=DataPlaneApi, config=self._config, openapi_config=openapi_config, - pool_threads=pool_threads + pool_threads=pool_threads, ) - + def __enter__(self): return self @@ -187,7 +183,11 @@ def upsert( return UpsertResponse(upserted_count=total_upserted) def _upsert_batch( - self, vectors: Union[List[Vector], List[tuple], List[dict]], namespace: Optional[str], _check_type: bool, **kwargs + self, + vectors: Union[List[Vector], List[tuple], List[dict]], + namespace: Optional[str], + _check_type: bool, + **kwargs, ) -> UpsertResponse: args_dict = self._parse_non_empty_args([("namespace", namespace)]) vec_builder = lambda v: VectorFactory.build(v, check_type=_check_type) @@ -375,13 +375,15 @@ def query( sparse_vector: (Union[SparseValues, Dict[str, Union[List[float], List[int]]]]): sparse values of the query vector. Expected to be either a SparseValues object or a dict of the form: {'indices': List[int], 'values': List[float]}, where the lists each have the same length. - + Returns: QueryResponse object which contains the list of the closest vectors as ScoredVector objects, and namespace name. 
""" if len(args) > 0: - raise ValueError("The argument order for `query()` has changed; please use keyword arguments instead of positional arguments. Example: index.query(vector=[0.1, 0.2, 0.3], top_k=10, namespace='my_namespace')") + raise ValueError( + "The argument order for `query()` has changed; please use keyword arguments instead of positional arguments. Example: index.query(vector=[0.1, 0.2, 0.3], top_k=10, namespace='my_namespace')" + ) if vector is not None and id is not None: raise ValueError("Cannot specify both `id` and `vector`") @@ -505,7 +507,7 @@ def describe_index_stats( ), **{k: v for k, v in kwargs.items() if k in _OPENAPI_ENDPOINT_PARAMS}, ) - + @validate_and_convert_errors def list_paginated( self, @@ -513,13 +515,13 @@ def list_paginated( limit: Optional[int] = None, pagination_token: Optional[str] = None, namespace: Optional[str] = None, - **kwargs - ) -> ListResponse: + **kwargs, + ) -> ListResponse: """ The list_paginated operation finds vectors based on an id prefix within a single namespace. It returns matching ids in a paginated form, with a pagination token to fetch the next page of results. This id list can then be passed to fetch or delete operations, depending on your use case. - + Consider using the `list` method to avoid having to handle pagination tokens manually. Examples: @@ -531,13 +533,13 @@ def list_paginated( >>> next_results = index.list_paginated(prefix='99', limit=5, namespace='my_namespace', pagination_token=results.pagination.next) Args: - prefix (Optional[str]): The id prefix to match. If unspecified, an empty string prefix will + prefix (Optional[str]): The id prefix to match. If unspecified, an empty string prefix will be used with the effect of listing all ids in a namespace [optional] limit (Optional[int]): The maximum number of ids to return. If unspecified, the server will use a default value. [optional] - pagination_token (Optional[str]): A token needed to fetch the next page of results. 
This token is returned + pagination_token (Optional[str]): A token needed to fetch the next page of results. This token is returned in the response if additional results are available. [optional] namespace (Optional[str]): The namespace to fetch vectors from. If not specified, the default namespace is used. [optional] - + Returns: ListResponse object which contains the list of ids, the namespace name, pagination information, and usage showing the number of read_units consumed. """ args_dict = self._parse_non_empty_args( @@ -565,10 +567,10 @@ def list(self, **kwargs): ['999'] Args: - prefix (Optional[str]): The id prefix to match. If unspecified, an empty string prefix will + prefix (Optional[str]): The id prefix to match. If unspecified, an empty string prefix will be used with the effect of listing all ids in a namespace [optional] limit (Optional[int]): The maximum number of ids to return. If unspecified, the server will use a default value. [optional] - pagination_token (Optional[str]): A token needed to fetch the next page of results. This token is returned + pagination_token (Optional[str]): A token needed to fetch the next page of results. This token is returned in the response if additional results are available. [optional] namespace (Optional[str]): The namespace to fetch vectors from. If not specified, the default namespace is used. 
[optional] """ @@ -577,7 +579,7 @@ def list(self, **kwargs): results = self.list_paginated(**kwargs) if len(results.vectors) > 0: yield [v.id for v in results.vectors] - + if results.pagination: kwargs.update({"pagination_token": results.pagination.next}) else: diff --git a/pinecone/data/sparse_vector_factory.py b/pinecone/data/sparse_vector_factory.py index e759ffdd..7376e872 100644 --- a/pinecone/data/sparse_vector_factory.py +++ b/pinecone/data/sparse_vector_factory.py @@ -5,15 +5,10 @@ from ..utils import convert_to_list -from .errors import ( - SparseValuesTypeError, - SparseValuesMissingKeysError, - SparseValuesDictionaryExpectedError -) +from .errors import SparseValuesTypeError, SparseValuesMissingKeysError, SparseValuesDictionaryExpectedError + +from pinecone.core.client.models import SparseValues -from pinecone.core.client.models import ( - SparseValues -) class SparseValuesFactory: @staticmethod @@ -37,18 +32,18 @@ def build(input: Union[Dict, SparseValues]) -> SparseValues: return SparseValues(indices=indices, values=values) except TypeError as e: raise SparseValuesTypeError() from e - + @staticmethod def _convert_to_list(input, expected_type): try: converted = convert_to_list(input) except TypeError as e: raise SparseValuesTypeError() from e - + SparseValuesFactory._validate_list_items_type(converted, expected_type) return converted - + @staticmethod def _validate_list_items_type(input, expected_type): if len(input) > 0 and not isinstance(input[0], expected_type): - raise SparseValuesTypeError() \ No newline at end of file + raise SparseValuesTypeError() diff --git a/pinecone/data/vector_factory.py b/pinecone/data/vector_factory.py index 3b486b1c..fddcf4d6 100644 --- a/pinecone/data/vector_factory.py +++ b/pinecone/data/vector_factory.py @@ -7,10 +7,7 @@ from ..utils.constants import REQUIRED_VECTOR_FIELDS, OPTIONAL_VECTOR_FIELDS from .sparse_vector_factory import SparseValuesFactory -from pinecone.core.client.models import ( - Vector, - SparseValues 
-) +from pinecone.core.client.models import Vector, SparseValues from .errors import ( VectorDictionaryMissingKeysError, @@ -19,6 +16,7 @@ MetadataDictionaryExpectedError, ) + class VectorFactory: @staticmethod def build(item: Union[Vector, Tuple, Dict], check_type: bool = True) -> Vector: @@ -37,7 +35,9 @@ def _tuple_to_vector(item, check_type: bool) -> Vector: raise VectorTupleLengthError(item) id, values, metadata = fix_tuple_length(item, 3) if isinstance(values, SparseValues): - raise ValueError("Sparse values are not supported in tuples. Please use either dicts or Vector objects as inputs.") + raise ValueError( + "Sparse values are not supported in tuples. Please use either dicts or Vector objects as inputs." + ) else: return Vector(id=id, values=convert_to_list(values), metadata=metadata or {}, _check_type=check_type) @@ -46,7 +46,7 @@ def _dict_to_vector(item, check_type: bool) -> Vector: item_keys = set(item.keys()) if not item_keys.issuperset(REQUIRED_VECTOR_FIELDS): raise VectorDictionaryMissingKeysError(item) - + excessive_keys = item_keys - (REQUIRED_VECTOR_FIELDS | OPTIONAL_VECTOR_FIELDS) if len(excessive_keys) > 0: raise VectorDictionaryExcessKeysError(item) @@ -68,6 +68,8 @@ def _dict_to_vector(item, check_type: bool) -> Vector: try: return Vector(**item, _check_type=check_type) except TypeError as e: - if not isinstance(item["values"], Iterable) or not isinstance(item["values"].__iter__().__next__(), numbers.Real): + if not isinstance(item["values"], Iterable) or not isinstance( + item["values"].__iter__().__next__(), numbers.Real + ): raise TypeError(f"Column `values` is expected to be a list of floats") raise e diff --git a/pinecone/deprecation_warnings.py b/pinecone/deprecation_warnings.py index e0e20105..ec458e78 100644 --- a/pinecone/deprecation_warnings.py +++ b/pinecone/deprecation_warnings.py @@ -7,6 +7,7 @@ def _build_class_migration_message(method_name: str, example: str): {example} """ + def init(*args, **kwargs): example = """ import os 
@@ -37,6 +38,7 @@ def init(*args, **kwargs): """ raise AttributeError(msg) + def list_indexes(*args, **kwargs): example = """ from pinecone import Pinecone @@ -48,7 +50,8 @@ def list_indexes(*args, **kwargs): if index_name not in pc.list_indexes().names(): # do something """ - raise AttributeError(_build_class_migration_message('list_indexes', example)) + raise AttributeError(_build_class_migration_message("list_indexes", example)) + def describe_index(*args, **kwargs): example = """ @@ -57,7 +60,7 @@ def describe_index(*args, **kwargs): pc = Pinecone(api_key='YOUR_API_KEY') pc.describe_index('my_index') """ - raise AttributeError(_build_class_migration_message('describe_index', example)) + raise AttributeError(_build_class_migration_message("describe_index", example)) def create_index(*args, **kwargs): @@ -75,7 +78,8 @@ def create_index(*args, **kwargs): ) ) """ - raise AttributeError(_build_class_migration_message('create_index', example)) + raise AttributeError(_build_class_migration_message("create_index", example)) + def delete_index(*args, **kwargs): example = """ @@ -84,7 +88,8 @@ def delete_index(*args, **kwargs): pc = Pinecone(api_key='YOUR_API_KEY') pc.delete_index('my_index') """ - raise AttributeError(_build_class_migration_message('delete_index', example)) + raise AttributeError(_build_class_migration_message("delete_index", example)) + def scale_index(*args, **kwargs): example = """ @@ -111,7 +116,8 @@ def create_collection(*args, **kwargs): pc = Pinecone(api_key='YOUR_API_KEY') pc.create_collection(name='my_collection', source='my_index') """ - raise AttributeError(_build_class_migration_message('create_collection', example)) + raise AttributeError(_build_class_migration_message("create_collection", example)) + def list_collections(*args, **kwargs): example = """ @@ -120,7 +126,8 @@ def list_collections(*args, **kwargs): pc = Pinecone(api_key='YOUR_API_KEY') pc.list_collections() """ - raise 
AttributeError(_build_class_migration_message('list_collections', example)) + raise AttributeError(_build_class_migration_message("list_collections", example)) + def delete_collection(*args, **kwargs): example = """ @@ -129,7 +136,8 @@ def delete_collection(*args, **kwargs): pc = Pinecone(api_key='YOUR_API_KEY') pc.delete_collection('my_collection') """ - raise AttributeError(_build_class_migration_message('delete_collection', example)) + raise AttributeError(_build_class_migration_message("delete_collection", example)) + def describe_collection(*args, **kwargs): example = """ @@ -138,7 +146,7 @@ def describe_collection(*args, **kwargs): pc = Pinecone(api_key='YOUR_API_KEY') pc.describe_collection('my_collection') """ - raise AttributeError(_build_class_migration_message('describe_collection', example)) + raise AttributeError(_build_class_migration_message("describe_collection", example)) def configure_index(*args, **kwargs): @@ -148,4 +156,4 @@ def configure_index(*args, **kwargs): pc = Pinecone(api_key='YOUR_API_KEY') pc.configure_index('my_index', replicas=2) """ - raise AttributeError(_build_class_migration_message('configure_index', example)) \ No newline at end of file + raise AttributeError(_build_class_migration_message("configure_index", example)) diff --git a/pinecone/exceptions.py b/pinecone/exceptions.py index 81a52f61..dfe564c6 100644 --- a/pinecone/exceptions.py +++ b/pinecone/exceptions.py @@ -11,16 +11,20 @@ ServiceException, ) + class PineconeProtocolError(PineconeException): """Raised when something unexpected happens mid-request/response.""" + class PineconeConfigurationError(PineconeException): """Raised when a configuration error occurs.""" + class ListConversionException(PineconeException, TypeError): def __init__(self, message): super().__init__(message) + __all__ = [ "PineconeConfigurationError", "PineconeProtocolError", @@ -34,5 +38,5 @@ def __init__(self, message): "UnauthorizedException", "ForbiddenException", "ServiceException", - 
"ListConversionException" + "ListConversionException", ] diff --git a/pinecone/grpc/__init__.py b/pinecone/grpc/__init__.py index 0a7670ef..19b92afa 100644 --- a/pinecone/grpc/__init__.py +++ b/pinecone/grpc/__init__.py @@ -52,5 +52,5 @@ Vector as GRPCVector, SparseValues as GRPCSparseValues, Vector, - SparseValues -) \ No newline at end of file + SparseValues, +) diff --git a/pinecone/grpc/base.py b/pinecone/grpc/base.py index 01413024..75fe4a64 100644 --- a/pinecone/grpc/base.py +++ b/pinecone/grpc/base.py @@ -41,11 +41,7 @@ def __init__( self.grpc_client_config = grpc_config or GRPCClientConfig() self.retry_config = self.grpc_client_config.retry_config or RetryConfig() - self.fixed_metadata = { - "api-key": config.api_key, - "service-name": index_name, - "client-version": CLIENT_VERSION - } + self.fixed_metadata = {"api-key": config.api_key, "service-name": index_name, "client-version": CLIENT_VERSION} if self.grpc_client_config.additional_metadata: self.fixed_metadata.update(self.grpc_client_config.additional_metadata) diff --git a/pinecone/grpc/index_grpc.py b/pinecone/grpc/index_grpc.py index 4f5a848a..cbca6f99 100644 --- a/pinecone/grpc/index_grpc.py +++ b/pinecone/grpc/index_grpc.py @@ -13,10 +13,7 @@ QueryResponse, DescribeIndexStatsResponse, ) -from pinecone.models.list_response import ( - ListResponse as SimpleListResponse, - Pagination -) +from pinecone.models.list_response import ListResponse as SimpleListResponse, Pagination from pinecone.core.grpc.protos.vector_service_pb2 import ( Vector as GRPCVector, QueryVector as GRPCQueryVector, @@ -43,10 +40,12 @@ _logger = logging.getLogger(__name__) + class SparseVectorTypedDict(TypedDict): indices: List[int] values: List[float] + class GRPCIndex(GRPCIndexBase): """A client for interacting with a Pinecone index via GRPC API.""" @@ -193,12 +192,14 @@ def upsert_from_dataframe( if use_async_requests: cast_results = cast(List[PineconeGrpcFuture], results) - results = [async_result.result() for async_result in - 
tqdm(cast_results, disable=not show_progress, desc="collecting async responses")] + results = [ + async_result.result() + for async_result in tqdm(cast_results, disable=not show_progress, desc="collecting async responses") + ] upserted_count = 0 for res in results: - if hasattr(res, 'upserted_count') and isinstance(res.upserted_count, int): + if hasattr(res, "upserted_count") and isinstance(res.upserted_count, int): upserted_count += res.upserted_count return UpsertResponse(upserted_count=upserted_count) @@ -435,18 +436,18 @@ def update( return self._wrap_grpc_call(self.stub.Update, request, timeout=timeout) def list_paginated( - self, - prefix: Optional[str] = None, - limit: Optional[int] = None, - pagination_token: Optional[str] = None, - namespace: Optional[str] = None, - **kwargs - ) -> SimpleListResponse: + self, + prefix: Optional[str] = None, + limit: Optional[int] = None, + pagination_token: Optional[str] = None, + namespace: Optional[str] = None, + **kwargs, + ) -> SimpleListResponse: """ The list_paginated operation finds vectors based on an id prefix within a single namespace. It returns matching ids in a paginated form, with a pagination token to fetch the next page of results. This id list can then be passed to fetch or delete operations, depending on your use case. - + Consider using the `list` method to avoid having to handle pagination tokens manually. Examples: @@ -458,13 +459,13 @@ def list_paginated( >>> next_results = index.list_paginated(prefix='99', limit=5, namespace='my_namespace', pagination_token=results.pagination.next) Args: - prefix (Optional[str]): The id prefix to match. If unspecified, an empty string prefix will + prefix (Optional[str]): The id prefix to match. If unspecified, an empty string prefix will be used with the effect of listing all ids in a namespace [optional] limit (Optional[int]): The maximum number of ids to return. If unspecified, the server will use a default value. 
[optional] - pagination_token (Optional[str]): A token needed to fetch the next page of results. This token is returned + pagination_token (Optional[str]): A token needed to fetch the next page of results. This token is returned in the response if additional results are available. [optional] namespace (Optional[str]): The namespace to fetch vectors from. If not specified, the default namespace is used. [optional] - + Returns: SimpleListResponse object which contains the list of ids, the namespace name, pagination information, and usage showing the number of read_units consumed. """ args_dict = self._parse_non_empty_args( @@ -478,18 +479,18 @@ def list_paginated( request = ListRequest(**args_dict, **kwargs) timeout = kwargs.pop("timeout", None) response = self._wrap_grpc_call(self.stub.List, request, timeout=timeout) - - if response.pagination and response.pagination.next != '': + + if response.pagination and response.pagination.next != "": pagination = Pagination(next=response.pagination.next) else: pagination = None - + return SimpleListResponse( namespace=response.namespace, vectors=response.vectors, pagination=pagination, ) - + def list(self, **kwargs): """ The list operation accepts all of the same arguments as list_paginated, and returns a generator that yields @@ -504,10 +505,10 @@ def list(self, **kwargs): ['999'] Args: - prefix (Optional[str]): The id prefix to match. If unspecified, an empty string prefix will + prefix (Optional[str]): The id prefix to match. If unspecified, an empty string prefix will be used with the effect of listing all ids in a namespace [optional] limit (Optional[int]): The maximum number of ids to return. If unspecified, the server will use a default value. [optional] - pagination_token (Optional[str]): A token needed to fetch the next page of results. This token is returned + pagination_token (Optional[str]): A token needed to fetch the next page of results. 
This token is returned in the response if additional results are available. [optional] namespace (Optional[str]): The namespace to fetch vectors from. If not specified, the default namespace is used. [optional] """ @@ -517,10 +518,10 @@ def list(self, **kwargs): results = self.list_paginated(**kwargs) except Exception as e: raise e - + if len(results.vectors) > 0: yield [v.id for v in results.vectors] - + if results.pagination and results.pagination.next: kwargs.update({"pagination_token": results.pagination.next}) else: diff --git a/pinecone/grpc/pinecone.py b/pinecone/grpc/pinecone.py index 3f2ddb46..830f6eab 100644 --- a/pinecone/grpc/pinecone.py +++ b/pinecone/grpc/pinecone.py @@ -2,6 +2,7 @@ from ..config.config import ConfigBuilder from .index_grpc import GRPCIndex + class PineconeGRPC(Pinecone): """ An alternative version of the Pinecone client that uses gRPC instead of HTTP for @@ -45,8 +46,8 @@ class PineconeGRPC(Pinecone): ``` """ - - def Index(self, name: str = '', host: str = '', **kwargs): + + def Index(self, name: str = "", host: str = "", **kwargs): """ Target an index for data operations. @@ -54,7 +55,7 @@ def Index(self, name: str = '', host: str = '', **kwargs): In production situations, you want to uspert or query your data as quickly as possible. If you know in advance the host url of your index, you can - eliminate a round trip to the Pinecone control plane by specifying the + eliminate a round trip to the Pinecone control plane by specifying the host of the index. ```python @@ -63,7 +64,7 @@ def Index(self, name: str = '', host: str = '', **kwargs): api_key = os.environ.get("PINECONE_API_KEY") index_host = os.environ.get("PINECONE_INDEX_HOST") - + pc = PineconeGRPC(api_key=api_key) index = pc.Index(host=index_host) @@ -90,11 +91,11 @@ def Index(self, name: str = '', host: str = '', **kwargs): For more casual usage, such as when you are playing and exploring with Pinecone in a notebook setting, you can also target an index by name. 
If you use this - approach, the client may need to perform an extra call to the Pinecone control + approach, the client may need to perform an extra call to the Pinecone control plane to get the host url on your behalf to get the index host. - The client will cache the index host for future use whenever it is seen, so you - will only incur the overhead of only one call. But this approach is not + The client will cache the index host for future use whenever it is seen, so you + will only incur the overhead of only one call. But this approach is not recommended for production usage. ```python @@ -103,7 +104,7 @@ def Index(self, name: str = '', host: str = '', **kwargs): from pinecone.grpc import PineconeGRPC api_key = os.environ.get("PINECONE_API_KEY") - + pc = PineconeGRPC(api_key=api_key) pc.create_index( name='my-index', @@ -117,15 +118,17 @@ def Index(self, name: str = '', host: str = '', **kwargs): index.query(vector=[...], top_k=10) ``` """ - if name == '' and host == '': + if name == "" and host == "": raise ValueError("Either name or host must be specified") # Use host if it is provided, otherwise get host from describe_index index_host = host or self.index_host_store.get_host(self.index_api, self.config, name) - config = ConfigBuilder.build(api_key=self.config.api_key, - host=index_host, - source_tag=self.config.source_tag, - proxy_url=self.config.proxy_url, - ssl_ca_certs=self.config.ssl_ca_certs) - return GRPCIndex(index_name=name, config=config, **kwargs) \ No newline at end of file + config = ConfigBuilder.build( + api_key=self.config.api_key, + host=index_host, + source_tag=self.config.source_tag, + proxy_url=self.config.proxy_url, + ssl_ca_certs=self.config.ssl_ca_certs, + ) + return GRPCIndex(index_name=name, config=config, **kwargs) diff --git a/pinecone/grpc/sparse_values_factory.py b/pinecone/grpc/sparse_values_factory.py index 452f131d..8844bfd2 100644 --- a/pinecone/grpc/sparse_values_factory.py +++ b/pinecone/grpc/sparse_values_factory.py @@ 
-5,18 +5,13 @@ from ..utils import convert_to_list -from ..data import ( - SparseValuesTypeError, - SparseValuesMissingKeysError, - SparseValuesDictionaryExpectedError -) +from ..data import SparseValuesTypeError, SparseValuesMissingKeysError, SparseValuesDictionaryExpectedError from pinecone.core.grpc.protos.vector_service_pb2 import ( SparseValues as GRPCSparseValues, ) -from pinecone import ( - SparseValues as NonGRPCSparseValues -) +from pinecone import SparseValues as NonGRPCSparseValues + class SparseValuesFactory: @staticmethod @@ -42,18 +37,18 @@ def build(input: Union[Dict, GRPCSparseValues, NonGRPCSparseValues]) -> GRPCSpar return GRPCSparseValues(indices=indices, values=values) except TypeError as e: raise SparseValuesTypeError() from e - + @staticmethod def _convert_to_list(input, expected_type): try: converted = convert_to_list(input) except TypeError as e: raise SparseValuesTypeError() from e - + SparseValuesFactory._validate_list_items_type(converted, expected_type) return converted - + @staticmethod def _validate_list_items_type(input, expected_type): if len(input) > 0 and not isinstance(input[0], expected_type): - raise SparseValuesTypeError() \ No newline at end of file + raise SparseValuesTypeError() diff --git a/pinecone/grpc/utils.py b/pinecone/grpc/utils.py index c03cfc0f..87af9842 100644 --- a/pinecone/grpc/utils.py +++ b/pinecone/grpc/utils.py @@ -2,9 +2,11 @@ from google.protobuf.struct_pb2 import Struct + def _generate_request_id() -> str: return str(uuid.uuid4()) + from pinecone.core.client.models import ( Vector as _Vector, Usage, @@ -18,6 +20,7 @@ def _generate_request_id() -> str: from typing import Optional + def dict_to_proto_struct(d: Optional[dict]) -> "Struct": if not d: d = {} @@ -25,6 +28,7 @@ def dict_to_proto_struct(d: Optional[dict]) -> "Struct": s.update(d) return s + def parse_sparse_values(sparse_values: dict): return ( SparseValues(indices=sparse_values["indices"], values=sparse_values["values"]) @@ -46,12 +50,9 @@ def 
parse_fetch_response(response: dict): metadata=vec.get("metadata", None), _check_type=False, ) - + return FetchResponse( - vectors=vd, - namespace=namespace, - usage=parse_usage(response.get("usage", {})), - _check_type=False + vectors=vd, namespace=namespace, usage=parse_usage(response.get("usage", {})), _check_type=False ) @@ -75,14 +76,13 @@ def parse_query_response(response: dict, _check_type: bool = False): # Due to OpenAPI model classes / actual parsing cost, we want to avoid # creating empty `Usage` objects and then passing them into QueryResponse # when they are not actually present in the response from the server. - args = {'namespace': response.get("namespace", ""), - 'matches': matches, - '_check_type': _check_type} + args = {"namespace": response.get("namespace", ""), "matches": matches, "_check_type": _check_type} usage = response.get("usage") if usage: - args['usage'] = parse_usage(usage) + args["usage"] = parse_usage(usage) return QueryResponse(**args) + def parse_stats_response(response: dict): fullness = response.get("indexFullness", 0.0) total_vector_count = response.get("totalVectorCount", 0) diff --git a/pinecone/grpc/vector_factory_grpc.py b/pinecone/grpc/vector_factory_grpc.py index 2ea62a08..a1717d83 100644 --- a/pinecone/grpc/vector_factory_grpc.py +++ b/pinecone/grpc/vector_factory_grpc.py @@ -9,10 +9,10 @@ from ..utils import fix_tuple_length, convert_to_list from ..utils.constants import REQUIRED_VECTOR_FIELDS, OPTIONAL_VECTOR_FIELDS from ..data import ( - VectorDictionaryMissingKeysError, - VectorDictionaryExcessKeysError, - VectorTupleLengthError, - MetadataDictionaryExpectedError + VectorDictionaryMissingKeysError, + VectorDictionaryExcessKeysError, + VectorTupleLengthError, + MetadataDictionaryExpectedError, ) from .sparse_values_factory import SparseValuesFactory @@ -20,10 +20,8 @@ Vector as GRPCVector, SparseValues as GRPCSparseValues, ) -from pinecone import ( - Vector as NonGRPCVector, - SparseValues as NonGRPCSparseValues -) 
+from pinecone import Vector as NonGRPCVector, SparseValues as NonGRPCSparseValues + class VectorFactoryGRPC: @staticmethod @@ -33,7 +31,9 @@ def build(item: Union[GRPCVector, NonGRPCVector, Tuple, Dict]) -> GRPCVector: elif isinstance(item, NonGRPCVector): if item.sparse_values: sv = GRPCSparseValues(indices=item.sparse_values.indices, values=item.sparse_values.values) - return GRPCVector(id=item.id, values=item.values, metadata=dict_to_proto_struct(item.metadata or {}), sparse_values=sv) + return GRPCVector( + id=item.id, values=item.values, metadata=dict_to_proto_struct(item.metadata or {}), sparse_values=sv + ) else: return GRPCVector(id=item.id, values=item.values, metadata=dict_to_proto_struct(item.metadata or {})) elif isinstance(item, tuple): @@ -49,7 +49,9 @@ def _tuple_to_vector(item) -> GRPCVector: raise VectorTupleLengthError(item) id, values, metadata = fix_tuple_length(item, 3) if isinstance(values, GRPCSparseValues) or isinstance(values, NonGRPCSparseValues): - raise ValueError("Sparse values are not supported in tuples. Please use either dicts or Vector objects as inputs.") + raise ValueError( + "Sparse values are not supported in tuples. Please use either dicts or Vector objects as inputs." + ) else: return GRPCVector(id=id, values=convert_to_list(values), metadata=dict_to_proto_struct(metadata or {})) @@ -58,7 +60,7 @@ def _dict_to_vector(item) -> GRPCVector: item_keys = set(item.keys()) if not item_keys.issuperset(REQUIRED_VECTOR_FIELDS): raise VectorDictionaryMissingKeysError(item) - + excessive_keys = item_keys - (REQUIRED_VECTOR_FIELDS | OPTIONAL_VECTOR_FIELDS) if len(excessive_keys) > 0: raise VectorDictionaryExcessKeysError(item) @@ -89,8 +91,12 @@ def _dict_to_vector(item) -> GRPCVector: # Where possible raise a more specific error to the user. 
vid = item.get("id") if not isinstance(vid, bytes) and not isinstance(vid, str): - raise TypeError(f"Cannot set Vector.id to {vid}: {vid} has type {type(vid)}, " - "but expected one of: (, ) for field Vector.id") - if not isinstance(item["values"], Iterable) or not isinstance(item["values"].__iter__().__next__(), numbers.Real): + raise TypeError( + f"Cannot set Vector.id to {vid}: {vid} has type {type(vid)}, " + "but expected one of: (, ) for field Vector.id" + ) + if not isinstance(item["values"], Iterable) or not isinstance( + item["values"].__iter__().__next__(), numbers.Real + ): raise TypeError(f"Column `values` is expected to be a list of floats") - raise e \ No newline at end of file + raise e diff --git a/pinecone/models/__init__.py b/pinecone/models/__init__.py index 19c506c3..6acf8d65 100644 --- a/pinecone/models/__init__.py +++ b/pinecone/models/__init__.py @@ -6,13 +6,13 @@ from .collection_list import CollectionList __all__ = [ - 'CollectionDescription', - 'IndexDescription', - 'IndexStatus', - 'PodSpec', - 'PodSpecDefinition', - 'ServerlessSpec', - 'ServerlessSpecDefinition', - 'IndexList', - 'CollectionList' -] \ No newline at end of file + "CollectionDescription", + "IndexDescription", + "IndexStatus", + "PodSpec", + "PodSpecDefinition", + "ServerlessSpec", + "ServerlessSpecDefinition", + "IndexList", + "CollectionList", +] diff --git a/pinecone/models/collection_description.py b/pinecone/models/collection_description.py index 533d9745..a5e38ce6 100644 --- a/pinecone/models/collection_description.py +++ b/pinecone/models/collection_description.py @@ -1,5 +1,6 @@ from typing import NamedTuple + class CollectionDescription(NamedTuple): """ The description of a collection. @@ -13,4 +14,4 @@ class CollectionDescription(NamedTuple): source: str """ The name of the index used to create the collection. 
- """ \ No newline at end of file + """ diff --git a/pinecone/models/collection_list.py b/pinecone/models/collection_list.py index b7f4472c..8d69d542 100644 --- a/pinecone/models/collection_list.py +++ b/pinecone/models/collection_list.py @@ -1,31 +1,32 @@ from pinecone.core.client.models import CollectionList as OpenAPICollectionList + class CollectionList: """ A list of collections. """ - + def __init__(self, collection_list: OpenAPICollectionList): self.collection_list = collection_list self.current = 0 def names(self): - return [i['name'] for i in self.collection_list.collections] + return [i["name"] for i in self.collection_list.collections] def __getitem__(self, key): return self.collection_list.collections[key] - + def __len__(self): return len(self.collection_list.collections) - + def __iter__(self): return iter(self.collection_list.collections) - + def __str__(self): return str(self.collection_list) - + def __repr__(self): return repr(self.collection_list) def __getattr__(self, attr): - return getattr(self.collection_list, attr) \ No newline at end of file + return getattr(self.collection_list, attr) diff --git a/pinecone/models/index_description.py b/pinecone/models/index_description.py index 8566cae5..f6d2d5ba 100644 --- a/pinecone/models/index_description.py +++ b/pinecone/models/index_description.py @@ -1,10 +1,14 @@ from typing import NamedTuple, Dict, Optional, Union, Literal + class IndexStatus(NamedTuple): state: str ready: bool -PodKey = Literal['pod'] + +PodKey = Literal["pod"] + + class PodSpecDefinition(NamedTuple): replicas: int shards: int @@ -13,18 +17,23 @@ class PodSpecDefinition(NamedTuple): environment: str metadata_config: Optional[Dict] + PodSpec = Dict[PodKey, PodSpecDefinition] -ServerlessKey = Literal['serverless'] +ServerlessKey = Literal["serverless"] + + class ServerlessSpecDefinition(NamedTuple): cloud: str region: str + ServerlessSpec = Dict[ServerlessKey, ServerlessSpecDefinition] + class IndexDescription(NamedTuple): """ - 
The description of an index. This object is returned from the `describe_index()` method. + The description of an index. This object is returned from the `describe_index()` method. """ name: str diff --git a/pinecone/models/index_list.py b/pinecone/models/index_list.py index d5454c2b..21fc4444 100644 --- a/pinecone/models/index_list.py +++ b/pinecone/models/index_list.py @@ -1,27 +1,28 @@ from pinecone.core.client.models import IndexList as OpenAPIIndexList + class IndexList: def __init__(self, index_list: OpenAPIIndexList): self.index_list = index_list self.current = 0 def names(self): - return [i['name'] for i in self.index_list.indexes] + return [i["name"] for i in self.index_list.indexes] def __getitem__(self, key): return self.index_list.indexes[key] - + def __len__(self): return len(self.index_list.indexes) - + def __iter__(self): return iter(self.index_list.indexes) - + def __str__(self): return str(self.index_list) - + def __repr__(self): return repr(self.index_list) def __getattr__(self, attr): - return getattr(self.index_list, attr) \ No newline at end of file + return getattr(self.index_list, attr) diff --git a/pinecone/models/list_response.py b/pinecone/models/list_response.py index c1ce1174..c3ba57d4 100644 --- a/pinecone/models/list_response.py +++ b/pinecone/models/list_response.py @@ -1,8 +1,10 @@ from typing import NamedTuple, Optional, List + class Pagination(NamedTuple): next: str + class ListResponse(NamedTuple): namespace: str vectors: List diff --git a/pinecone/models/pod_spec.py b/pinecone/models/pod_spec.py index e70c6ff3..2f37fd81 100644 --- a/pinecone/models/pod_spec.py +++ b/pinecone/models/pod_spec.py @@ -1,12 +1,13 @@ from typing import NamedTuple, Optional, Dict + class PodSpec(NamedTuple): """ PodSpec represents the configuration used to deploy a pod-based index. 
- + To learn more about the options for each configuration, please see [Understanding Indexes](https://docs.pinecone.io/docs/indexes) """ - + environment: str """ The environment where the pod index will be deployed. Example: 'us-east1-gcp' @@ -69,4 +70,4 @@ def asdict(self): """ Returns the PodSpec as a dictionary. """ - return {"pod": self._asdict()} \ No newline at end of file + return {"pod": self._asdict()} diff --git a/pinecone/models/serverless_spec.py b/pinecone/models/serverless_spec.py index 33f8fdbc..4ae549f5 100644 --- a/pinecone/models/serverless_spec.py +++ b/pinecone/models/serverless_spec.py @@ -1,8 +1,9 @@ from typing import NamedTuple + class ServerlessSpec(NamedTuple): cloud: str region: str def asdict(self): - return {"serverless": self._asdict()} \ No newline at end of file + return {"serverless": self._asdict()} diff --git a/pinecone/utils/__init__.py b/pinecone/utils/__init__.py index e72df335..640fdf63 100644 --- a/pinecone/utils/__init__.py +++ b/pinecone/utils/__init__.py @@ -6,4 +6,4 @@ from .convert_to_list import convert_to_list from .normalize_host import normalize_host from .setup_openapi_client import setup_openapi_client, build_plugin_setup_client -from .docslinks import docslinks \ No newline at end of file +from .docslinks import docslinks diff --git a/pinecone/utils/check_kwargs.py b/pinecone/utils/check_kwargs.py index f03bc7b7..9c9cd330 100644 --- a/pinecone/utils/check_kwargs.py +++ b/pinecone/utils/check_kwargs.py @@ -1,6 +1,7 @@ import inspect import logging + def check_kwargs(caller, given): argspec = inspect.getfullargspec(caller) diff = set(given).difference(argspec.args) diff --git a/pinecone/utils/convert_to_list.py b/pinecone/utils/convert_to_list.py index eb57ef72..9bd99923 100644 --- a/pinecone/utils/convert_to_list.py +++ b/pinecone/utils/convert_to_list.py @@ -1,11 +1,12 @@ from ..exceptions import ListConversionException + def convert_to_list(obj): class_name = obj.__class__.__name__ - if class_name == 'list': + 
if class_name == "list": return obj - elif hasattr(obj, 'tolist') and callable(getattr(obj, 'tolist')): + elif hasattr(obj, "tolist") and callable(getattr(obj, "tolist")): return obj.tolist() elif obj is None or isinstance(obj, str) or isinstance(obj, dict): # The string and dictionary classes in python can be passed to list() @@ -15,4 +16,4 @@ def convert_to_list(obj): try: return list(obj) except Exception as e: - raise ListConversionException(f"Expected a list or list-like data structure, but got: {obj}") from e \ No newline at end of file + raise ListConversionException(f"Expected a list or list-like data structure, but got: {obj}") from e diff --git a/pinecone/utils/deprecation_notice.py b/pinecone/utils/deprecation_notice.py index bc86a08c..69e33aba 100644 --- a/pinecone/utils/deprecation_notice.py +++ b/pinecone/utils/deprecation_notice.py @@ -1,6 +1,7 @@ from typing import Optional import warnings + def warn_deprecated(description: str, deprecated_in: str, removal_in: str): message = f"DEPRECATED since v{deprecated_in} [Will be removed in v{removal_in}]: {description}" - warnings.warn(message, FutureWarning) \ No newline at end of file + warnings.warn(message, FutureWarning) diff --git a/pinecone/utils/docslinks.py b/pinecone/utils/docslinks.py index aa2ba24d..98e7442f 100644 --- a/pinecone/utils/docslinks.py +++ b/pinecone/utils/docslinks.py @@ -1,4 +1,4 @@ docslinks = { - 'GITHUB_REPO': 'https://github.com/pinecone-io/pinecone-python-client', - 'LANGCHAIN_IMPORT_KB_ARTICLE': 'https://docs.pinecone.io/troubleshooting/pinecone-attribute-errors-with-langchain' -} \ No newline at end of file + "GITHUB_REPO": "https://github.com/pinecone-io/pinecone-python-client", + "LANGCHAIN_IMPORT_KB_ARTICLE": "https://docs.pinecone.io/troubleshooting/pinecone-attribute-errors-with-langchain", +} diff --git a/pinecone/utils/error_handling.py b/pinecone/utils/error_handling.py index 733e6091..8d6c0dd7 100644 --- a/pinecone/utils/error_handling.py +++ 
b/pinecone/utils/error_handling.py @@ -3,6 +3,7 @@ from urllib3.exceptions import MaxRetryError, ProtocolError + def validate_and_convert_errors(func): @wraps(func) def inner_func(*args, **kwargs): diff --git a/pinecone/utils/fix_tuple_length.py b/pinecone/utils/fix_tuple_length.py index fa4fc5eb..12762986 100644 --- a/pinecone/utils/fix_tuple_length.py +++ b/pinecone/utils/fix_tuple_length.py @@ -1,3 +1,3 @@ def fix_tuple_length(t, n): """Extend tuple t to length n by adding None items at the end of the tuple. Return the new tuple.""" - return t + ((None,) * (n - len(t))) if len(t) < n else t \ No newline at end of file + return t + ((None,) * (n - len(t))) if len(t) < n else t diff --git a/pinecone/utils/normalize_host.py b/pinecone/utils/normalize_host.py index 2eac61a1..79c9cd40 100644 --- a/pinecone/utils/normalize_host.py +++ b/pinecone/utils/normalize_host.py @@ -1,8 +1,8 @@ def normalize_host(host): if host is None: return host - if host.startswith('https://'): + if host.startswith("https://"): return host - if host.startswith('http://'): + if host.startswith("http://"): return host - return 'https://' + host \ No newline at end of file + return "https://" + host diff --git a/pinecone/utils/setup_openapi_client.py b/pinecone/utils/setup_openapi_client.py index f1d33424..10dd53f7 100644 --- a/pinecone/utils/setup_openapi_client.py +++ b/pinecone/utils/setup_openapi_client.py @@ -1,6 +1,7 @@ from .user_agent import get_user_agent import copy + def setup_openapi_client(api_client_klass, api_klass, config, openapi_config, pool_threads, api_version=None, **kwargs): # It is important that we allow the user to pass in a reference to api_client_klass # instead of creating a direct dependency on ApiClient because plugins have their @@ -11,23 +12,24 @@ def setup_openapi_client(api_client_klass, api_klass, config, openapi_config, po # a decision about something. 
if kwargs.get("host"): openapi_config = copy.deepcopy(openapi_config) - openapi_config._base_path = kwargs['host'] + openapi_config._base_path = kwargs["host"] - api_client = api_client_klass( - configuration=openapi_config, - pool_threads=pool_threads - ) + api_client = api_client_klass(configuration=openapi_config, pool_threads=pool_threads) api_client.user_agent = get_user_agent(config) extra_headers = config.additional_headers or {} for key, value in extra_headers.items(): api_client.set_default_header(key, value) - + if api_version: api_client.set_default_header("X-Pinecone-API-Version", api_version) client = api_klass(api_client) return client + def build_plugin_setup_client(config, openapi_config, pool_threads): def setup_plugin_client(api_client_klass, api_klass, api_version, **kwargs): - return setup_openapi_client(api_client_klass, api_klass, config, openapi_config, pool_threads, api_version, **kwargs) + return setup_openapi_client( + api_client_klass, api_klass, config, openapi_config, pool_threads, api_version, **kwargs + ) + return setup_plugin_client diff --git a/pinecone/utils/user_agent.py b/pinecone/utils/user_agent.py index aeda857b..8ff89e4d 100644 --- a/pinecone/utils/user_agent.py +++ b/pinecone/utils/user_agent.py @@ -4,6 +4,7 @@ from .constants import SOURCE_TAG import re + def _build_source_tag_field(source_tag): # normalize source tag # 1. Lowercase @@ -11,19 +12,22 @@ def _build_source_tag_field(source_tag): # 3. Trim left/right whitespace # 4. 
Condense multiple spaces to one, and replace with underscore tag = source_tag.lower() - tag = re.sub(r'[^a-z0-9_ :]', '', tag) + tag = re.sub(r"[^a-z0-9_ :]", "", tag) tag = tag.strip() tag = "_".join(tag.split()) return f"{SOURCE_TAG}={tag}" + def _get_user_agent(client_id, config): user_agent_details = {"urllib3": urllib3.__version__} user_agent = "{} ({})".format(client_id, ", ".join([f"{k}:{v}" for k, v in user_agent_details.items()])) user_agent += f"; {_build_source_tag_field(config.source_tag)}" if config.source_tag else "" return user_agent + def get_user_agent(config): return _get_user_agent(f"python-client-{__version__}", config) + def get_user_agent_grpc(config): - return _get_user_agent(f"python-client[grpc]-{__version__}", config) \ No newline at end of file + return _get_user_agent(f"python-client[grpc]-{__version__}", config) diff --git a/pinecone/utils/version.py b/pinecone/utils/version.py index b60a5bc2..1da30cfe 100644 --- a/pinecone/utils/version.py +++ b/pinecone/utils/version.py @@ -1,6 +1,8 @@ from pathlib import Path + def get_version(): return Path(__file__).parent.parent.joinpath("__version__").read_text().strip() -__version__ = get_version() \ No newline at end of file + +__version__ = get_version() diff --git a/poetry.lock b/poetry.lock index a365c304..c0a569ea 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "astunparse" @@ -15,6 +15,52 @@ files = [ six = ">=1.6.1,<2.0" wheel = ">=0.23.0,<1.0" +[[package]] +name = "black" +version = "24.4.2" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = 
"black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "certifi" version = "2023.11.17" @@ -26,6 +72,17 @@ files = [ {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] +[[package]] +name = "cfgv" +version = 
"3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -125,6 +182,20 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" @@ -200,6 +271,17 @@ files = [ [package.extras] toml = ["tomli"] +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + [[package]] name = "exceptiongroup" version = "1.1.3" @@ -214,6 +296,22 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "filelock" +version = "3.15.1" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.15.1-py3-none-any.whl", hash = "sha256:71b3102950e91dfc1bb4209b64be4dc8854f40e5f534428d8684f953ac847fac"}, + {file = "filelock-3.15.1.tar.gz", hash = "sha256:58a2549afdf9e02e10720eaa4d4470f56386d7a6f72edd7d0596337af8ed7ad8"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + [[package]] name = "googleapis-common-protos" version = "1.61.0" @@ -311,6 +409,20 @@ files = [ [package.extras] protobuf = ["grpcio-tools (>=1.59.2)"] +[[package]] +name = "identify" +version = "2.5.36" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.36-py2.py3-none-any.whl", hash = "sha256:37d93f380f4de590500d9dba7db359d0d3da95ffe7f9de1753faa159e71e7dfa"}, + {file = "identify-2.5.36.tar.gz", hash = "sha256:e5e00f54165f9047fbebeb4a560f9acfb8af4c88232be60a488e9b68d122745d"}, +] + +[package.extras] +license = ["ukkonen"] + [[package]] name = "idna" version = "3.7" @@ -525,6 +637,17 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + [[package]] name = "numpy" version = "1.24.4" @@ -714,6 
+837,17 @@ files = [ numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} types-pytz = ">=2022.1.1" +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + [[package]] name = "pdoc" version = "14.1.0" @@ -745,6 +879,22 @@ files = [ {file = "pinecone_plugin_interface-0.0.7.tar.gz", hash = "sha256:b8e6675e41847333aa13923cc44daa3f85676d7157324682dc1640588a982846"}, ] +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + [[package]] name = "pluggy" version = "1.3.0" @@ -760,6 +910,24 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pre-commit" +version = "3.5.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, + {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + [[package]] name = "protobuf" version = "4.25.3" @@ -1161,6 +1329,26 @@ files = [ [package.extras] tests = ["flake8", "pytest", "pytest-cov", "requests"] +[[package]] +name = "virtualenv" +version = "20.26.2" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, + {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "wheel" version = "0.42.0" @@ -1181,4 +1369,4 @@ grpc = ["googleapis-common-protos", "grpcio", "grpcio", "lz4", "protobuf", "prot [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "3f06d23e45560281fcd773e7bde9a5d3be62db0de27456d8b8ca332187e8e031" +content-hash = 
"1c61ed3540dc2bf608b4c29a42f1a0a6a0a5cf48faba3c1e0968ae783091a40f" diff --git a/pyproject.toml b/pyproject.toml index ee663154..e7d8e52c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -84,6 +84,7 @@ types-tqdm = "^4.66.0.3" types-protobuf = "^4.24.0.4" [tool.poetry.group.dev.dependencies] +pre-commit = "^3.0.0" numpy = [ { version = ">=1.22", python = ">=3.9" }, { version = ">=1.21", python = ">=3.8,<3.9" }, @@ -97,6 +98,7 @@ pytest-mock = "3.6.1" pytest-timeout = "2.2.0" urllib3_mock = "0.3.3" responses = ">=0.8.1" +black = "^24.4.2" [tool.poetry.extras] grpc = ["grpcio", "googleapis-common-protos", "lz4", "protobuf", "protoc-gen-openapiv2"] diff --git a/scripts/cleanup-all.py b/scripts/cleanup-all.py index f5841173..b8b5e88f 100644 --- a/scripts/cleanup-all.py +++ b/scripts/cleanup-all.py @@ -1,25 +1,26 @@ import os from pinecone import Pinecone + def main(): - pc = Pinecone(api_key=os.environ.get('PINECONE_API_KEY', None)) + pc = Pinecone(api_key=os.environ.get("PINECONE_API_KEY", None)) for collection in pc.list_collections().names(): try: - print('Deleting collection: ' + collection) + print("Deleting collection: " + collection) pc.delete_collection(collection) except Exception as e: - print('Failed to delete collection: ' + collection + ' ' + str(e)) + print("Failed to delete collection: " + collection + " " + str(e)) pass for index in pc.list_indexes().names(): try: - print('Deleting index: ' + index) + print("Deleting index: " + index) pc.delete_index(index) except Exception as e: - print('Failed to delete index: ' + index + ' ' + str(e)) + print("Failed to delete index: " + index + " " + str(e)) pass -if __name__ == '__main__': - main() +if __name__ == "__main__": + main() diff --git a/scripts/create-index-legacy.py b/scripts/create-index-legacy.py index 2a98025e..6c85109d 100644 --- a/scripts/create-index-legacy.py +++ b/scripts/create-index-legacy.py @@ -4,60 +4,58 @@ import pinecone import time + def read_env_var(name): value = os.environ.get(name) 
if value is None: - raise Exception('Environment variable {} is not set'.format(name)) + raise Exception("Environment variable {} is not set".format(name)) return value + def random_embedding_values(dimension=2): return [random.random() for _ in range(dimension)] + def random_string(length): - return ''.join(random.choice(string.ascii_lowercase) for i in range(length)) + return "".join(random.choice(string.ascii_lowercase) for i in range(length)) + def main(): - api_key = read_env_var('PINECONE_API_KEY') - environment = read_env_var('PINECONE_ENVIRONMENT') - index_name = read_env_var('INDEX_NAME') - dimension = int(read_env_var('DIMENSION')) - metric = read_env_var('METRIC') + api_key = read_env_var("PINECONE_API_KEY") + environment = read_env_var("PINECONE_ENVIRONMENT") + index_name = read_env_var("INDEX_NAME") + dimension = int(read_env_var("DIMENSION")) + metric = read_env_var("METRIC") - print(f'Beginning test with environment {environment} and index {index_name}') + print(f"Beginning test with environment {environment} and index {index_name}") - pinecone.init( - api_key=api_key, - environment=environment - ) + pinecone.init(api_key=api_key, environment=environment) if index_name in pinecone.list_indexes(): - print(f'Index {index_name} already exists') + print(f"Index {index_name} already exists") pinecone.delete_index(index_name) - - pinecone.create_index( - name=index_name, - dimension=dimension, - metric=metric - ) - - print(f'Waiting for index {index_name} to be ready...') + + pinecone.create_index(name=index_name, dimension=dimension, metric=metric) + + print(f"Waiting for index {index_name} to be ready...") time.sleep(60) - print(f'Done waiting.') + print(f"Done waiting.") description = pinecone.describe_index(index_name) - print(f'Index description: {description}') + print(f"Index description: {description}") - print(f'Beginning upsert of 1000 vectors to index {index_name}...') + print(f"Beginning upsert of 1000 vectors to index {index_name}...") index = 
pinecone.Index(name=index_name) for _ in range(100): vector = random_embedding_values(dimension) - vecs = [{'id': random_string(10), 'values': vector} for i in range(10)] + vecs = [{"id": random_string(10), "values": vector} for i in range(10)] index.upsert(vectors=[vecs]) - print(f'Done upserting.') + print(f"Done upserting.") - print(f'Beginning query of index {index_name}...') + print(f"Beginning query of index {index_name}...") index.query(vector=random_embedding_values(dimension)) - print(f'Done querying.') + print(f"Done querying.") + -if __name__ == '__main__': - main() \ No newline at end of file +if __name__ == "__main__": + main() diff --git a/scripts/create.py b/scripts/create.py index 352c2137..a8b63493 100644 --- a/scripts/create.py +++ b/scripts/create.py @@ -3,35 +3,39 @@ import string from pinecone import Pinecone + def read_env_var(name): value = os.environ.get(name) if value is None: - raise 'Environment variable {} is not set'.format(name) + raise "Environment variable {} is not set".format(name) return value + def random_string(length): - return ''.join(random.choice(string.ascii_lowercase) for i in range(length)) + return "".join(random.choice(string.ascii_lowercase) for i in range(length)) + def write_gh_output(name, value): - with open(os.environ['GITHUB_OUTPUT'], 'a') as fh: - print(f'{name}={value}', file=fh) + with open(os.environ["GITHUB_OUTPUT"], "a") as fh: + print(f"{name}={value}", file=fh) + def main(): - pc = Pinecone(api_key=read_env_var('PINECONE_API_KEY')) - index_name = read_env_var('NAME_PREFIX') + random_string(20) + pc = Pinecone(api_key=read_env_var("PINECONE_API_KEY")) + index_name = read_env_var("NAME_PREFIX") + random_string(20) pc.create_index( name=index_name, - metric=read_env_var('METRIC'), - dimension=int(read_env_var('DIMENSION')), + metric=read_env_var("METRIC"), + dimension=int(read_env_var("DIMENSION")), spec={ - 'serverless': { - 'cloud': read_env_var('CLOUD'), - 'region': read_env_var('REGION'), + "serverless": 
{ + "cloud": read_env_var("CLOUD"), + "region": read_env_var("REGION"), } - } + }, ) - write_gh_output('index_name', index_name) + write_gh_output("index_name", index_name) -if __name__ == '__main__': - main() +if __name__ == "__main__": + main() diff --git a/scripts/delete-all-collections.py b/scripts/delete-all-collections.py index 1a20c8a9..0f03c1bc 100644 --- a/scripts/delete-all-collections.py +++ b/scripts/delete-all-collections.py @@ -1,20 +1,22 @@ import os from pinecone import Pinecone + def read_env_var(name): value = os.environ.get(name) if value is None: - raise Exception('Environment variable {} is not set'.format(name)) + raise Exception("Environment variable {} is not set".format(name)) return value + def main(): - pc = Pinecone(api_key=read_env_var('PINECONE_API_KEY')) + pc = Pinecone(api_key=read_env_var("PINECONE_API_KEY")) collections = pc.list_collections().names() for collection in collections: if collection != "": pc.delete_collection(collection) -if __name__ == '__main__': - main() +if __name__ == "__main__": + main() diff --git a/scripts/delete.py b/scripts/delete.py index af047fea..e51d5588 100644 --- a/scripts/delete.py +++ b/scripts/delete.py @@ -1,17 +1,20 @@ import os from pinecone import Pinecone + def read_env_var(name): value = os.environ.get(name) if value is None: - raise 'Environment variable {} is not set'.format(name) + raise "Environment variable {} is not set".format(name) return value + def main(): - pc = Pinecone(api_key=read_env_var('PINECONE_API_KEY')) - to_delete = read_env_var('INDEX_NAME') + pc = Pinecone(api_key=read_env_var("PINECONE_API_KEY")) + to_delete = read_env_var("INDEX_NAME") pc.delete_index(name=to_delete) - print('Index deleted: ' + to_delete) + print("Index deleted: " + to_delete) + -if __name__ == '__main__': - main() \ No newline at end of file +if __name__ == "__main__": + main() diff --git a/scripts/generate_usage.py b/scripts/generate_usage.py index 1a2fc4a3..ba5791e0 100755 --- 
a/scripts/generate_usage.py +++ b/scripts/generate_usage.py @@ -3,45 +3,53 @@ import string from pinecone.grpc import PineconeGRPC + def read_env_var(name): value = os.environ.get(name) if value is None: - raise Exception('Environment variable {} is not set'.format(name)) + raise Exception("Environment variable {} is not set".format(name)) return value + def random_string(length): - return ''.join(random.choice(string.ascii_lowercase) for i in range(length)) + return "".join(random.choice(string.ascii_lowercase) for i in range(length)) + def random_embedding_values(dimension=2): return [random.random() for _ in range(dimension)] + def write_gh_output(name, value): - with open(os.environ['GITHUB_OUTPUT'], 'a') as fh: - print(f'{name}={value}', file=fh) + with open(os.environ["GITHUB_OUTPUT"], "a") as fh: + print(f"{name}={value}", file=fh) + + +DIMENSION = 1536 # common for openai embeddings -DIMENSION = 1536 # common for openai embeddings def create_index_if_not_exists(pc, index_name): if index_name not in pc.list_indexes().names(): - print(f'Index {index_name} does not exist, creating it') + print(f"Index {index_name} does not exist, creating it") pc.create_index( name=index_name, - metric='cosine', + metric="cosine", dimension=DIMENSION, spec={ - 'serverless': { - 'cloud': read_env_var('CLOUD'), - 'region': read_env_var('REGION'), + "serverless": { + "cloud": read_env_var("CLOUD"), + "region": read_env_var("REGION"), } - } + }, ) + upserted_ids = set() + def main(): - pc = PineconeGRPC(api_key=read_env_var('PINECONE_API_KEY')) - index_name = read_env_var('INDEX_NAME') - iterations = int(read_env_var('ITERATIONS')) + pc = PineconeGRPC(api_key=read_env_var("PINECONE_API_KEY")) + index_name = read_env_var("INDEX_NAME") + iterations = int(read_env_var("ITERATIONS")) create_index_if_not_exists(pc, index_name) @@ -52,39 +60,40 @@ def main(): items_to_upsert = random.randint(1, 100) vector_list = [ { - 'id': random_string(10), - 'values': 
random_embedding_values(DIMENSION), - 'metadata': { - 'genre': random.choice(['action', 'comedy', 'drama']), - 'runtime': random.randint(60, 120) - } - } for x in range(items_to_upsert) + "id": random_string(10), + "values": random_embedding_values(DIMENSION), + "metadata": { + "genre": random.choice(["action", "comedy", "drama"]), + "runtime": random.randint(60, 120), + }, + } + for x in range(items_to_upsert) ] index.upsert(vectors=vector_list) - print('Upserted {} vectors'.format(items_to_upsert)) + print("Upserted {} vectors".format(items_to_upsert)) for v in vector_list: - upserted_ids.add(v['id']) + upserted_ids.add(v["id"]) # Fetch some vectors ids_to_fetch = random.sample(upserted_ids, k=random.randint(1, 20)) - print('Fetching {} vectors'.format(len(ids_to_fetch))) + print("Fetching {} vectors".format(len(ids_to_fetch))) fetched_vectors = index.fetch(ids=ids_to_fetch) # Query some vectors - print('Querying 10 times') + print("Querying 10 times") for i in range(10): # Query by vector values query_vector = random_embedding_values(DIMENSION) query_results = index.query(vector=query_vector, top_k=10) - + # Delete some vectors - print('Deleting some vectors') + print("Deleting some vectors") id_to_delete = random.sample(upserted_ids, k=random.randint(1, 10)) index.delete(ids=id_to_delete) except Exception as e: - print('Exception: {}'.format(e)) + print("Exception: {}".format(e)) -if __name__ == '__main__': - main() +if __name__ == "__main__": + main() diff --git a/tests/dependency/grpc/test_sanity.py b/tests/dependency/grpc/test_sanity.py index c278b607..a7d8fb25 100644 --- a/tests/dependency/grpc/test_sanity.py +++ b/tests/dependency/grpc/test_sanity.py @@ -3,33 +3,32 @@ import time from pinecone.grpc import PineconeGRPC + @pytest.fixture def index_name(): - name = os.environ.get('INDEX_NAME', None) + name = os.environ.get("INDEX_NAME", None) if name is None: - raise 'INDEX_NAME environment variable is not set' + raise "INDEX_NAME environment variable is not 
set" return name + @pytest.fixture def client(): - return PineconeGRPC(api_key=os.environ.get('PINECONE_API_KEY')) + return PineconeGRPC(api_key=os.environ.get("PINECONE_API_KEY")) + class TestSanityRest: def test_sanity(self, index_name, client): - print('Testing with index name: ' + index_name) - assert index_name != '' - + print("Testing with index name: " + index_name) + assert index_name != "" + # Verify index exists with expected properties assert index_name in client.list_indexes().names() description = client.describe_index(name=index_name) assert description.dimension == 2 idx = client.Index(index_name) - idx.upsert(vectors=[ - ('1', [1.0, 2.0]), - ('2', [3.0, 4.0]), - ('3', [5.0, 6.0]) - ]) + idx.upsert(vectors=[("1", [1.0, 2.0]), ("2", [3.0, 4.0]), ("3", [5.0, 6.0])]) # Wait for index freshness time.sleep(30) @@ -40,6 +39,6 @@ def test_sanity(self, index_name, client): assert description.total_vector_count == 3 # Query for results - query_results = idx.query(id='1', top_k=10, include_values=True) - assert query_results.matches[0].id == '1' - assert len(query_results.matches) == 3 \ No newline at end of file + query_results = idx.query(id="1", top_k=10, include_values=True) + assert query_results.matches[0].id == "1" + assert len(query_results.matches) == 3 diff --git a/tests/dependency/rest/test_sanity.py b/tests/dependency/rest/test_sanity.py index 7e348b10..05ba5ed2 100644 --- a/tests/dependency/rest/test_sanity.py +++ b/tests/dependency/rest/test_sanity.py @@ -3,33 +3,32 @@ import time from pinecone import Pinecone + @pytest.fixture def index_name(): - name = os.environ.get('INDEX_NAME', None) - if name is None or name == '': - raise 'INDEX_NAME environment variable is not set' + name = os.environ.get("INDEX_NAME", None) + if name is None or name == "": + raise "INDEX_NAME environment variable is not set" return name + @pytest.fixture def client(): - return Pinecone(api_key=os.environ.get('PINECONE_API_KEY')) + return 
Pinecone(api_key=os.environ.get("PINECONE_API_KEY")) + class TestSanityRest: def test_sanity(self, index_name, client): - print('Testing with index name: ' + index_name) - assert index_name != '' - + print("Testing with index name: " + index_name) + assert index_name != "" + # Verify index exists with expected properties assert index_name in client.list_indexes().names() description = client.describe_index(name=index_name) assert description.dimension == 2 idx = client.Index(index_name) - idx.upsert(vectors=[ - ('1', [1.0, 2.0]), - ('2', [3.0, 4.0]), - ('3', [5.0, 6.0]) - ]) + idx.upsert(vectors=[("1", [1.0, 2.0]), ("2", [3.0, 4.0]), ("3", [5.0, 6.0])]) # Wait for index freshness time.sleep(30) @@ -40,6 +39,6 @@ def test_sanity(self, index_name, client): assert description.total_vector_count == 3 # Query for results - query_results = idx.query(id='1', top_k=10, include_values=True) - assert query_results.matches[0].id == '1' - assert len(query_results.matches) == 3 \ No newline at end of file + query_results = idx.query(id="1", top_k=10, include_values=True) + assert query_results.matches[0].id == "1" + assert len(query_results.matches) == 3 diff --git a/tests/integration/control/pod/conftest.py b/tests/integration/control/pod/conftest.py index afadabe6..35f6613f 100644 --- a/tests/integration/control/pod/conftest.py +++ b/tests/integration/control/pod/conftest.py @@ -5,116 +5,109 @@ from pinecone import Pinecone, PodSpec from ...helpers import generate_index_name, get_environment_var + @pytest.fixture() def client(): - api_key = get_environment_var('PINECONE_API_KEY') - return Pinecone( - api_key=api_key, - additional_headers={'sdk-test-suite': 'pinecone-python-client'} - ) + api_key = get_environment_var("PINECONE_API_KEY") + return Pinecone(api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"}) + @pytest.fixture() def environment(): - return get_environment_var('PINECONE_ENVIRONMENT') + return 
get_environment_var("PINECONE_ENVIRONMENT") + @pytest.fixture() def dimension(): - return int(get_environment_var('DIMENSION')) + return int(get_environment_var("DIMENSION")) + @pytest.fixture() def create_index_params(index_name, environment, dimension, metric): - spec = { - 'pod': { - 'environment': environment, - 'pod_type': 'p1.x1' - } - } - return dict( - name=index_name, - dimension=dimension, - metric=metric, - spec=spec, - timeout=-1 - ) + spec = {"pod": {"environment": environment, "pod_type": "p1.x1"}} + return dict(name=index_name, dimension=dimension, metric=metric, spec=spec, timeout=-1) + @pytest.fixture() def metric(): - return get_environment_var('METRIC') + return get_environment_var("METRIC") + @pytest.fixture() def random_vector(dimension): def _random_vector(): return [random.uniform(0, 1) for _ in range(dimension)] + return _random_vector + @pytest.fixture() def index_name(request): test_name = request.node.name return generate_index_name(test_name) + @pytest.fixture() def ready_index(client, index_name, create_index_params): - create_index_params['timeout'] = None + create_index_params["timeout"] = None client.create_index(**create_index_params) - time.sleep(10) # Extra wait, since status is sometimes inaccurate + time.sleep(10) # Extra wait, since status is sometimes inaccurate yield index_name client.delete_index(index_name, -1) + @pytest.fixture() def notready_index(client, index_name, create_index_params): - create_index_params.update({'timeout': -1 }) + create_index_params.update({"timeout": -1}) client.create_index(**create_index_params) yield index_name + def index_exists(index_name, client): return index_name in client.list_indexes().names() def random_string(): - return ''.join(random.choice(string.ascii_lowercase) for i in range(10)) + return "".join(random.choice(string.ascii_lowercase) for i in range(10)) -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def reusable_collection(): pc = Pinecone( - 
api_key=get_environment_var('PINECONE_API_KEY'), - additional_headers={'sdk-test-suite': 'pinecone-python-client'} + api_key=get_environment_var("PINECONE_API_KEY"), additional_headers={"sdk-test-suite": "pinecone-python-client"} ) - index_name = 'temp-index-' + random_string() - dimension = int(get_environment_var('DIMENSION')) + index_name = "temp-index-" + random_string() + dimension = int(get_environment_var("DIMENSION")) print(f"Creating index {index_name} to prepare a collection...") pc.create_index( name=index_name, dimension=dimension, - metric=get_environment_var('METRIC'), + metric=get_environment_var("METRIC"), spec=PodSpec( - environment=get_environment_var('PINECONE_ENVIRONMENT'), - ) + environment=get_environment_var("PINECONE_ENVIRONMENT"), + ), ) print(f"Created index {index_name}. Waiting 10 seconds to make sure it's ready...") time.sleep(10) - + num_vectors = 10 - vectors = [ - (str(i), [random.uniform(0, 1) for _ in range(dimension)]) for i in range(num_vectors) ] - + vectors = [(str(i), [random.uniform(0, 1) for _ in range(dimension)]) for i in range(num_vectors)] + index = pc.Index(index_name) index.upsert(vectors=vectors) - collection_name = 'reused-coll-' + random_string() - pc.create_collection( - name=collection_name, - source=index_name - ) - + collection_name = "reused-coll-" + random_string() + pc.create_collection(name=collection_name, source=index_name) + time_waited = 0 desc = pc.describe_collection(collection_name) - collection_ready = desc['status'] - while collection_ready.lower() != 'ready' and time_waited < 120: + collection_ready = desc["status"] + while collection_ready.lower() != "ready" and time_waited < 120: print(f"Waiting for collection {collection_name} to be ready. 
Waited {time_waited} seconds...") time.sleep(5) time_waited += 5 desc = pc.describe_collection(collection_name) - collection_ready = desc['status'] + collection_ready = desc["status"] if time_waited >= 120: raise Exception(f"Collection {collection_name} is not ready after 120 seconds") @@ -127,6 +120,7 @@ def reusable_collection(): print(f"Deleting collection {collection_name}...") pc.delete_collection(collection_name) + @pytest.fixture(autouse=True) def cleanup(client, index_name): yield diff --git a/tests/integration/control/pod/test_collections.py b/tests/integration/control/pod/test_collections.py index cfe79c77..10eb0499 100644 --- a/tests/integration/control/pod/test_collections.py +++ b/tests/integration/control/pod/test_collections.py @@ -4,31 +4,35 @@ import time from pinecone import PodSpec + def random_string(): - return ''.join(random.choice(string.ascii_lowercase) for i in range(10)) + return "".join(random.choice(string.ascii_lowercase) for i in range(10)) + class TestCollectionsHappyPath: - def test_index_to_collection_to_index_happy_path(self, client, environment, dimension, metric, ready_index, random_vector): + def test_index_to_collection_to_index_happy_path( + self, client, environment, dimension, metric, ready_index, random_vector + ): index = client.Index(ready_index) num_vectors = 10 - vectors = [ (str(i), random_vector()) for i in range(num_vectors) ] + vectors = [(str(i), random_vector()) for i in range(num_vectors)] index.upsert(vectors=vectors) - collection_name = 'coll1-' + random_string() + collection_name = "coll1-" + random_string() client.create_collection(name=collection_name, source=ready_index) desc = client.describe_collection(collection_name) - assert desc['name'] == collection_name - assert desc['environment'] == environment - assert desc['status'] == 'Initializing' + assert desc["name"] == collection_name + assert desc["environment"] == environment + assert desc["status"] == "Initializing" time_waited = 0 - collection_ready = 
desc['status'] - while collection_ready.lower() != 'ready' and time_waited < 120: + collection_ready = desc["status"] + while collection_ready.lower() != "ready" and time_waited < 120: print(f"Waiting for collection {collection_name} to be ready. Waited {time_waited} seconds...") time.sleep(5) time_waited += 5 desc = client.describe_collection(collection_name) - collection_ready = desc['status'] + collection_ready = desc["status"] assert collection_name in client.list_collections().names() @@ -36,31 +40,30 @@ def test_index_to_collection_to_index_happy_path(self, client, environment, dime raise Exception(f"Collection {collection_name} is not ready after 120 seconds") # After collection ready, these should all be defined - assert desc['name'] == collection_name - assert desc['status'] == 'Ready' - assert desc['environment'] == environment - assert desc['dimension'] == dimension - assert desc['vector_count'] == num_vectors - assert desc['size'] != None - assert desc['size'] > 0 + assert desc["name"] == collection_name + assert desc["status"] == "Ready" + assert desc["environment"] == environment + assert desc["dimension"] == dimension + assert desc["vector_count"] == num_vectors + assert desc["size"] != None + assert desc["size"] > 0 # Create index from collection - index_name = 'index-from-collection-' + collection_name + index_name = "index-from-collection-" + collection_name print(f"Creating index {index_name} from collection {collection_name}...") client.create_index( - name=index_name, - dimension=dimension, - metric=metric, - spec=PodSpec( - environment=environment, - source_collection=collection_name - ) + name=index_name, + dimension=dimension, + metric=metric, + spec=PodSpec(environment=environment, source_collection=collection_name), + ) + print( + f"Created index {index_name} from collection {collection_name}. Waiting a little more to make sure it's ready..." ) - print(f"Created index {index_name} from collection {collection_name}. 
Waiting a little more to make sure it's ready...") time.sleep(30) desc = client.describe_index(index_name) - assert desc['name'] == index_name - assert desc['status']['ready'] == True + assert desc["name"] == index_name + assert desc["status"]["ready"] == True new_index = client.Index(index_name) @@ -80,19 +83,18 @@ def test_index_to_collection_to_index_happy_path(self, client, environment, dime client.delete_collection(collection_name) client.delete_index(index_name) - def test_create_index_with_different_metric_from_orig_index(self, client, dimension, metric, environment, reusable_collection): - metrics = ['cosine', 'euclidean', 'dotproduct'] + def test_create_index_with_different_metric_from_orig_index( + self, client, dimension, metric, environment, reusable_collection + ): + metrics = ["cosine", "euclidean", "dotproduct"] target_metric = random.choice([x for x in metrics if x != metric]) - - index_name = 'from-coll-' + random_string() + + index_name = "from-coll-" + random_string() client.create_index( - name=index_name, - dimension=dimension, - metric=target_metric, - spec=PodSpec( - environment=environment, - source_collection=reusable_collection - ) + name=index_name, + dimension=dimension, + metric=target_metric, + spec=PodSpec(environment=environment, source_collection=reusable_collection), ) time.sleep(10) client.delete_index(index_name, -1) diff --git a/tests/integration/control/pod/test_collections_errors.py b/tests/integration/control/pod/test_collections_errors.py index deaf7b42..f91835eb 100644 --- a/tests/integration/control/pod/test_collections_errors.py +++ b/tests/integration/control/pod/test_collections_errors.py @@ -4,69 +4,62 @@ import time from pinecone import PodSpec + def random_string(): - return ''.join(random.choice(string.ascii_lowercase) for i in range(10)) + return "".join(random.choice(string.ascii_lowercase) for i in range(10)) + class TestCollectionErrorCases: def test_create_index_with_nonexistent_source_collection(self, client, 
dimension, metric, environment): with pytest.raises(Exception) as e: - index_name = 'from-nonexistent-coll-' + random_string() + index_name = "from-nonexistent-coll-" + random_string() client.create_index( name=index_name, dimension=dimension, metric=metric, - spec=PodSpec( - environment=environment, - source_collection='doesnotexist' - ) + spec=PodSpec(environment=environment, source_collection="doesnotexist"), ) client.delete_index(index_name, -1) - assert 'Resource doesnotexist not found' in str(e.value) + assert "Resource doesnotexist not found" in str(e.value) def test_create_index_in_mismatched_environment(self, client, dimension, metric, environment, reusable_collection): envs = [ - 'eastus-azure', - 'eu-west4-gcp', - 'northamerica-northeast1-gcp', - 'us-central1-gcp', - 'us-west4-gcp', - 'asia-southeast1-gcp', - 'us-east-1-aws', - 'asia-northeast1-gcp', - 'eu-west1-gcp', - 'us-east1-gcp', - 'us-east4-gcp', - 'us-west1-gcp', + "eastus-azure", + "eu-west4-gcp", + "northamerica-northeast1-gcp", + "us-central1-gcp", + "us-west4-gcp", + "asia-southeast1-gcp", + "us-east-1-aws", + "asia-northeast1-gcp", + "eu-west1-gcp", + "us-east1-gcp", + "us-east4-gcp", + "us-west1-gcp", ] target_env = random.choice([x for x in envs if x != environment]) - + with pytest.raises(Exception) as e: - index_name = 'from-coll-' + random_string() + index_name = "from-coll-" + random_string() client.create_index( - name=index_name, - dimension=dimension, - metric=metric, - spec=PodSpec( - environment=target_env, - source_collection=reusable_collection - ) + name=index_name, + dimension=dimension, + metric=metric, + spec=PodSpec(environment=target_env, source_collection=reusable_collection), ) client.delete_index(index_name, -1) - assert 'Source collection must be in the same environment as the index' in str(e.value) + assert "Source collection must be in the same environment as the index" in str(e.value) @pytest.mark.skip(reason="Bug reported in #global-cps") def 
test_create_index_with_mismatched_dimension(self, client, dimension, metric, environment, reusable_collection): with pytest.raises(Exception) as e: client.create_index( - name='from-coll-' + random_string(), - dimension=dimension + 1, - metric=metric, - spec=PodSpec( - environment=environment, - source_collection=reusable_collection - ) + name="from-coll-" + random_string(), + dimension=dimension + 1, + metric=metric, + spec=PodSpec(environment=environment, source_collection=reusable_collection), ) - assert 'Index and collection must have the same dimension' in str(e.value) + assert "Index and collection must have the same dimension" in str(e.value) # def test_create_index_from_notready_collection(self, client, ready_index, random_vector, dimension, metric, environment): # index = client.Index(ready_index) @@ -78,14 +71,14 @@ def test_create_index_with_mismatched_dimension(self, client, dimension, metric, # client.create_collection(name=collection_name, source=ready_index) # # Not doing any waiting for collection to be ready - + # with pytest.raises(Exception) as e: # client.create_index( # name='coll-notready-idx-' + random_string(), - # dimension=dimension, - # metric=metric, + # dimension=dimension, + # metric=metric, # spec=PodSpec( - # environment=environment, + # environment=environment, # source_collection=collection_name # ) # ) @@ -93,13 +86,13 @@ def test_create_index_with_mismatched_dimension(self, client, dimension, metric, # assert 'Source collection is not ready' in str(e.value) def test_create_collection_from_not_ready_index(self, client, notready_index): - name = 'coll3-' + random_string() + name = "coll3-" + random_string() with pytest.raises(Exception) as e: client.create_collection(name, notready_index) - assert 'Source index is not ready' in str(e.value) + assert "Source index is not ready" in str(e.value) def test_create_collection_with_invalid_index(self, client): - name = 'coll4-' + random_string() + name = "coll4-" + random_string() with 
pytest.raises(Exception) as e: - client.create_collection(name, 'invalid_index') - assert 'Resource invalid_index not found' in str(e.value) \ No newline at end of file + client.create_collection(name, "invalid_index") + assert "Resource invalid_index not found" in str(e.value) diff --git a/tests/integration/control/pod/test_configure_pod_index.py b/tests/integration/control/pod/test_configure_pod_index.py index 74ab55f5..72246801 100644 --- a/tests/integration/control/pod/test_configure_pod_index.py +++ b/tests/integration/control/pod/test_configure_pod_index.py @@ -1,7 +1,8 @@ import pytest import time -class TestConfigurePodIndex(): + +class TestConfigurePodIndex: def test_configure_pod_index(self, client, ready_index): - time.sleep(10) # Wait a little more, just in case. - client.configure_index(ready_index, replicas=1, pod_type='p1.x1') \ No newline at end of file + time.sleep(10) # Wait a little more, just in case. + client.configure_index(ready_index, replicas=1, pod_type="p1.x1") diff --git a/tests/integration/control/serverless/conftest.py b/tests/integration/control/serverless/conftest.py index dad78574..1a6ee0ba 100644 --- a/tests/integration/control/serverless/conftest.py +++ b/tests/integration/control/serverless/conftest.py @@ -5,101 +5,108 @@ from pinecone import Pinecone, NotFoundException, PineconeApiException from ...helpers import generate_index_name, get_environment_var + @pytest.fixture() def client(): - api_key = get_environment_var('PINECONE_API_KEY') - return Pinecone( - api_key=api_key, - additional_headers={'sdk-test-suite': 'pinecone-python-client'} - ) + api_key = get_environment_var("PINECONE_API_KEY") + return Pinecone(api_key=api_key, additional_headers={"sdk-test-suite": "pinecone-python-client"}) + @pytest.fixture() def serverless_cloud(): - return get_environment_var('SERVERLESS_CLOUD', 'aws') + return get_environment_var("SERVERLESS_CLOUD", "aws") + @pytest.fixture() def serverless_region(): - return 
get_environment_var('SERVERLESS_REGION', 'us-west-2') + return get_environment_var("SERVERLESS_REGION", "us-west-2") + @pytest.fixture() def create_sl_index_params(index_name, serverless_cloud, serverless_region): - spec = {"serverless": { - 'cloud': serverless_cloud, - 'region': serverless_region - }} - return dict(name=index_name, dimension=10, metric='cosine', spec=spec) + spec = {"serverless": {"cloud": serverless_cloud, "region": serverless_region}} + return dict(name=index_name, dimension=10, metric="cosine", spec=spec) + @pytest.fixture() def random_vector(): return [random.uniform(0, 1) for _ in range(10)] + @pytest.fixture() def index_name(request): test_name = request.node.name return generate_index_name(test_name) + @pytest.fixture() def ready_sl_index(client, index_name, create_sl_index_params): - create_sl_index_params['timeout'] = None + create_sl_index_params["timeout"] = None client.create_index(**create_sl_index_params) yield index_name client.delete_index(index_name, -1) + @pytest.fixture() def notready_sl_index(client, index_name, create_sl_index_params): client.create_index(**create_sl_index_params, timeout=-1) yield index_name + @pytest.fixture() def ready_pod_index(client, index_name, create_pod_index_params): - del create_pod_index_params['timeout'] + del create_pod_index_params["timeout"] client.create_index(**create_pod_index_params) yield index_name + @pytest.fixture() def notready_pod_index(client, index_name, create_pod_index_params): client.create_index(**create_pod_index_params) yield index_name + def delete_with_retry(client, index_name, retries=0, sleep_interval=5): - print('Deleting index ' + index_name + ', retry ' + str(retries) + ', next sleep interval ' + str(sleep_interval)) + print("Deleting index " + index_name + ", retry " + str(retries) + ", next sleep interval " + str(sleep_interval)) try: client.delete_index(index_name, -1) except NotFoundException: pass except PineconeApiException as e: - if e.error.code == 
'PRECONDITON_FAILED': + if e.error.code == "PRECONDITON_FAILED": if retries > 5: - raise 'Unable to delete index ' + index_name + raise "Unable to delete index " + index_name time.sleep(sleep_interval) delete_with_retry(client, index_name, retries + 1, sleep_interval * 2) else: print(e.__class__) print(e) - raise 'Unable to delete index ' + index_name + raise "Unable to delete index " + index_name except Exception as e: print(e.__class__) print(e) - raise 'Unable to delete index ' + index_name + raise "Unable to delete index " + index_name + @pytest.fixture(autouse=True) def cleanup(client, index_name): yield try: - client.delete_index(index_name, -1) + client.delete_index(index_name, -1) except: - pass + pass + -@pytest.fixture(autouse=True, scope='session') +@pytest.fixture(autouse=True, scope="session") def cleanup_all(): yield - client = Pinecone(additional_headers={'sdk-test-suite': 'pinecone-python-client'}) + client = Pinecone(additional_headers={"sdk-test-suite": "pinecone-python-client"}) for index in client.list_indexes(): - buildNumber = os.getenv('GITHUB_BUILD_NUMBER') + buildNumber = os.getenv("GITHUB_BUILD_NUMBER") if index.name.startswith(buildNumber): try: delete_with_retry(client, index.name) except: - pass \ No newline at end of file + pass diff --git a/tests/integration/control/serverless/test_create_index_api_errors.py b/tests/integration/control/serverless/test_create_index_api_errors.py index 32bde3c4..b4807b2c 100644 --- a/tests/integration/control/serverless/test_create_index_api_errors.py +++ b/tests/integration/control/serverless/test_create_index_api_errors.py @@ -1,19 +1,20 @@ import pytest from pinecone import PineconeApiException, PineconeApiValueError + class TestCreateIndexApiErrorCases: def test_create_index_with_invalid_name(self, client, create_sl_index_params): - create_sl_index_params['name'] = 'Invalid-name' + create_sl_index_params["name"] = "Invalid-name" with pytest.raises(PineconeApiException): 
client.create_index(**create_sl_index_params) def test_create_index_invalid_metric(self, client, create_sl_index_params): - create_sl_index_params['metric'] = 'invalid' + create_sl_index_params["metric"] = "invalid" with pytest.raises(PineconeApiValueError): client.create_index(**create_sl_index_params) def test_create_index_with_invalid_neg_dimension(self, client, create_sl_index_params): - create_sl_index_params['dimension'] = -1 + create_sl_index_params["dimension"] = -1 with pytest.raises(PineconeApiValueError): client.create_index(**create_sl_index_params) @@ -22,10 +23,10 @@ def test_create_index_that_already_exists(self, client, create_sl_index_params): with pytest.raises(PineconeApiException): client.create_index(**create_sl_index_params) - @pytest.mark.skip(reason='Bug filed https://app.asana.com/0/1205078872348810/1205917627868143') + @pytest.mark.skip(reason="Bug filed https://app.asana.com/0/1205078872348810/1205917627868143") def test_create_index_w_incompatible_options(self, client, create_sl_index_params): - create_sl_index_params['pod_type'] = 'p1.x2' - create_sl_index_params['environment'] = 'us-east1-gcp' - create_sl_index_params['replicas'] = 2 + create_sl_index_params["pod_type"] = "p1.x2" + create_sl_index_params["environment"] = "us-east1-gcp" + create_sl_index_params["replicas"] = 2 with pytest.raises(PineconeApiException): - client.create_index(**create_sl_index_params) \ No newline at end of file + client.create_index(**create_sl_index_params) diff --git a/tests/integration/control/serverless/test_create_index_sl_happy_path.py b/tests/integration/control/serverless/test_create_index_sl_happy_path.py index 4b813912..4924b590 100644 --- a/tests/integration/control/serverless/test_create_index_sl_happy_path.py +++ b/tests/integration/control/serverless/test_create_index_sl_happy_path.py @@ -1,18 +1,19 @@ import pytest + class TestCreateSLIndexHappyPath: - def test_create_index(self, client, create_sl_index_params): - name = 
create_sl_index_params['name'] - dimension = create_sl_index_params['dimension'] + def test_create_index(self, client, create_sl_index_params): + name = create_sl_index_params["name"] + dimension = create_sl_index_params["dimension"] client.create_index(**create_sl_index_params) desc = client.describe_index(name) assert desc.name == name assert desc.dimension == dimension - assert desc.metric == 'cosine' + assert desc.metric == "cosine" - @pytest.mark.parametrize('metric', ['cosine', 'euclidean', 'dotproduct']) + @pytest.mark.parametrize("metric", ["cosine", "euclidean", "dotproduct"]) def test_create_index_with_metric(self, client, create_sl_index_params, metric): - create_sl_index_params['metric'] = metric + create_sl_index_params["metric"] = metric client.create_index(**create_sl_index_params) - desc = client.describe_index(create_sl_index_params['name']) - assert desc.metric == metric \ No newline at end of file + desc = client.describe_index(create_sl_index_params["name"]) + assert desc.metric == metric diff --git a/tests/integration/control/serverless/test_create_index_timeouts.py b/tests/integration/control/serverless/test_create_index_timeouts.py index cb7e3ba5..89344029 100644 --- a/tests/integration/control/serverless/test_create_index_timeouts.py +++ b/tests/integration/control/serverless/test_create_index_timeouts.py @@ -1,28 +1,28 @@ -import pytest +import pytest + class TestCreateIndexWithTimeout: def test_create_index_default_timeout(self, client, create_sl_index_params): - create_sl_index_params['timeout'] = None + create_sl_index_params["timeout"] = None client.create_index(**create_sl_index_params) # Waits infinitely for index to be ready - desc = client.describe_index(create_sl_index_params['name']) + desc = client.describe_index(create_sl_index_params["name"]) assert desc.status.ready == True def test_create_index_when_timeout_set(self, client, create_sl_index_params): - create_sl_index_params['timeout'] = 1000 # effectively infinite, but 
different code path from None + create_sl_index_params["timeout"] = 1000 # effectively infinite, but different code path from None client.create_index(**create_sl_index_params) - desc = client.describe_index(create_sl_index_params['name']) + desc = client.describe_index(create_sl_index_params["name"]) assert desc.status.ready == True def test_create_index_when_timeout_error(self, client, create_sl_index_params): - create_sl_index_params['timeout'] = 1 + create_sl_index_params["timeout"] = 1 with pytest.raises(TimeoutError): client.create_index(**create_sl_index_params) def test_create_index_with_negative_timeout(self, client, create_sl_index_params): - create_sl_index_params['timeout'] = -1 + create_sl_index_params["timeout"] = -1 client.create_index(**create_sl_index_params) - desc = client.describe_index(create_sl_index_params['name']) + desc = client.describe_index(create_sl_index_params["name"]) # Returns immediately without waiting for index to be ready assert desc.status.ready == False - diff --git a/tests/integration/control/serverless/test_create_index_type_errors.py b/tests/integration/control/serverless/test_create_index_type_errors.py index 252b6840..04cb99d5 100644 --- a/tests/integration/control/serverless/test_create_index_type_errors.py +++ b/tests/integration/control/serverless/test_create_index_type_errors.py @@ -1,13 +1,14 @@ import pytest from pinecone import PineconeApiTypeError + class TestCreateIndexTypeErrorCases: def test_create_index_with_invalid_str_dimension(self, client, create_sl_index_params): - create_sl_index_params['dimension'] = '10' + create_sl_index_params["dimension"] = "10" with pytest.raises(PineconeApiTypeError): client.create_index(**create_sl_index_params) def test_create_index_with_missing_dimension(self, client, create_sl_index_params): - del create_sl_index_params['dimension'] + del create_sl_index_params["dimension"] with pytest.raises(TypeError): - client.create_index(**create_sl_index_params) \ No newline at end of 
file + client.create_index(**create_sl_index_params) diff --git a/tests/integration/control/serverless/test_describe_index.py b/tests/integration/control/serverless/test_describe_index.py index 90c96bf3..6b3fae4a 100644 --- a/tests/integration/control/serverless/test_describe_index.py +++ b/tests/integration/control/serverless/test_describe_index.py @@ -1,22 +1,23 @@ import pytest from pinecone import IndexModel + class TestDescribeIndex: def test_describe_index_when_ready(self, client, ready_sl_index, create_sl_index_params): description = client.describe_index(ready_sl_index) - + assert type(description) == IndexModel assert description.name == ready_sl_index - assert description.dimension == create_sl_index_params['dimension'] - assert description.metric == create_sl_index_params['metric'] - assert description.spec.serverless['cloud'] == create_sl_index_params['spec']['serverless']['cloud'] - assert description.spec.serverless['region'] == create_sl_index_params['spec']['serverless']['region'] + assert description.dimension == create_sl_index_params["dimension"] + assert description.metric == create_sl_index_params["metric"] + assert description.spec.serverless["cloud"] == create_sl_index_params["spec"]["serverless"]["cloud"] + assert description.spec.serverless["region"] == create_sl_index_params["spec"]["serverless"]["region"] assert type(description.host) == str assert description.host != "" assert ready_sl_index in description.host - assert description.status.state == 'Ready' + assert description.status.state == "Ready" assert description.status.ready == True def test_describe_index_when_not_ready(self, client, notready_sl_index, create_sl_index_params): @@ -24,14 +25,14 @@ def test_describe_index_when_not_ready(self, client, notready_sl_index, create_s assert type(description) == IndexModel assert description.name == notready_sl_index - assert description.dimension == create_sl_index_params['dimension'] - assert description.metric == 
create_sl_index_params['metric'] - assert description.spec.serverless['cloud'] == create_sl_index_params['spec']['serverless']['cloud'] - assert description.spec.serverless['region'] == create_sl_index_params['spec']['serverless']['region'] + assert description.dimension == create_sl_index_params["dimension"] + assert description.metric == create_sl_index_params["metric"] + assert description.spec.serverless["cloud"] == create_sl_index_params["spec"]["serverless"]["cloud"] + assert description.spec.serverless["region"] == create_sl_index_params["spec"]["serverless"]["region"] assert type(description.host) == str assert description.host != "" assert notready_sl_index in description.host assert description.status.ready == False - assert description.status.state in ['Ready', 'Initializing'] \ No newline at end of file + assert description.status.state in ["Ready", "Initializing"] diff --git a/tests/integration/control/serverless/test_list_indexes.py b/tests/integration/control/serverless/test_list_indexes.py index e3d7ae83..29136db5 100644 --- a/tests/integration/control/serverless/test_list_indexes.py +++ b/tests/integration/control/serverless/test_list_indexes.py @@ -1,15 +1,16 @@ from pinecone import IndexModel -class TestListIndexes(): + +class TestListIndexes: def test_list_indexes_includes_ready_indexes(self, client, ready_sl_index, create_sl_index_params): list_response = client.list_indexes() assert len(list_response.indexes) != 0 assert type(list_response.indexes[0]) == IndexModel - + created_index = [index for index in list_response.indexes if index.name == ready_sl_index][0] assert created_index.name == ready_sl_index - assert created_index.dimension == create_sl_index_params['dimension'] - assert created_index.metric == create_sl_index_params['metric'] + assert created_index.dimension == create_sl_index_params["dimension"] + assert created_index.metric == create_sl_index_params["metric"] assert ready_sl_index in created_index.host def 
test_list_indexes_includes_not_ready_indexes(self, client, notready_sl_index): diff --git a/tests/integration/data/conftest.py b/tests/integration/data/conftest.py index 16b3e13b..c69b8d44 100644 --- a/tests/integration/data/conftest.py +++ b/tests/integration/data/conftest.py @@ -13,74 +13,84 @@ # - environment: free vs paid # - with metadata vs without metadata + def api_key(): - return get_environment_var('PINECONE_API_KEY') + return get_environment_var("PINECONE_API_KEY") + def use_grpc(): - return os.environ.get('USE_GRPC', 'false') == 'true' + return os.environ.get("USE_GRPC", "false") == "true" + def build_client(): if use_grpc(): from pinecone.grpc import PineconeGRPC + return PineconeGRPC(api_key=api_key()) else: from pinecone import Pinecone - return Pinecone(api_key=api_key(), additional_headers={'sdk-test-suite': 'pinecone-python-client'}) -@pytest.fixture(scope='session') + return Pinecone(api_key=api_key(), additional_headers={"sdk-test-suite": "pinecone-python-client"}) + + +@pytest.fixture(scope="session") def api_key_fixture(): return api_key() -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def client(): return build_client() - -@pytest.fixture(scope='session') + + +@pytest.fixture(scope="session") def metric(): - return get_environment_var('METRIC', 'cosine') + return get_environment_var("METRIC", "cosine") -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def spec(): - return json.loads(get_environment_var('SPEC')) + return json.loads(get_environment_var("SPEC")) + -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def index_name(): - return 'dataplane-' + random_string(20) - -@pytest.fixture(scope='session') + return "dataplane-" + random_string(20) + + +@pytest.fixture(scope="session") def namespace(): # return 'banana' return random_string(10) -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def list_namespace(): # return 'list-banana' return random_string(10) 
-@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def idx(client, index_name, index_host): return client.Index(name=index_name, host=index_host) -@pytest.fixture(scope='session') + +@pytest.fixture(scope="session") def index_host(index_name, metric, spec): pc = build_client() - print('Creating index with name: ' + index_name) + print("Creating index with name: " + index_name) if index_name not in pc.list_indexes().names(): - pc.create_index( - name=index_name, - dimension=2, - metric=metric, - spec=spec - ) + pc.create_index(name=index_name, dimension=2, metric=metric, spec=spec) description = pc.describe_index(name=index_name) yield description.host - print('Deleting index with name: ' + index_name) + print("Deleting index with name: " + index_name) pc.delete_index(index_name, -1) -@pytest.fixture(scope='session', autouse=True) + +@pytest.fixture(scope="session", autouse=True) def seed_data(idx, namespace, index_host, list_namespace): - print('Seeding data in host ' + index_host) + print("Seeding data in host " + index_host) print('Seeding list data in namespace "' + list_namespace + '"') setup_list_data(idx, list_namespace, True) @@ -89,9 +99,9 @@ def seed_data(idx, namespace, index_host, list_namespace): setup_data(idx, namespace, False) print('Seeding data in namespace ""') - setup_data(idx, '', True) + setup_data(idx, "", True) - print('Waiting a bit more to ensure freshness') + print("Waiting a bit more to ensure freshness") time.sleep(120) - yield \ No newline at end of file + yield diff --git a/tests/integration/data/seed.py b/tests/integration/data/seed.py index 26491fee..fba0fc57 100644 --- a/tests/integration/data/seed.py +++ b/tests/integration/data/seed.py @@ -2,45 +2,44 @@ from pinecone import Vector from .utils import embedding_values + def setup_data(idx, target_namespace, wait): # Upsert without metadata - idx.upsert(vectors=[ - ('1', embedding_values(2)), - ('2', embedding_values(2)), - ('3', embedding_values(2)) - ], - 
namespace=target_namespace + idx.upsert( + vectors=[("1", embedding_values(2)), ("2", embedding_values(2)), ("3", embedding_values(2))], + namespace=target_namespace, ) # Upsert with metadata - idx.upsert(vectors=[ - Vector(id='4', values=embedding_values(2), metadata={'genre': 'action', 'runtime': 120 }), - Vector(id='5', values=embedding_values(2), metadata={'genre': 'comedy', 'runtime': 90 }), - Vector(id='6', values=embedding_values(2), metadata={'genre': 'romance', 'runtime': 240 }) - ], - namespace=target_namespace + idx.upsert( + vectors=[ + Vector(id="4", values=embedding_values(2), metadata={"genre": "action", "runtime": 120}), + Vector(id="5", values=embedding_values(2), metadata={"genre": "comedy", "runtime": 90}), + Vector(id="6", values=embedding_values(2), metadata={"genre": "romance", "runtime": 240}), + ], + namespace=target_namespace, ) # Upsert with dict - idx.upsert(vectors=[ - {'id': '7', 'values': embedding_values(2)}, - {'id': '8', 'values': embedding_values(2)}, - {'id': '9', 'values': embedding_values(2)} - ], - namespace=target_namespace + idx.upsert( + vectors=[ + {"id": "7", "values": embedding_values(2)}, + {"id": "8", "values": embedding_values(2)}, + {"id": "9", "values": embedding_values(2)}, + ], + namespace=target_namespace, ) if wait: - poll_fetch_for_ids_in_namespace(idx, ids=['1', '2', '3', '4', '5', '6', '7', '8', '9'], namespace=target_namespace) + poll_fetch_for_ids_in_namespace( + idx, ids=["1", "2", "3", "4", "5", "6", "7", "8", "9"], namespace=target_namespace + ) + def setup_list_data(idx, target_namespace, wait): # Upsert a bunch more stuff for testing list pagination for i in range(0, 1000, 50): - idx.upsert(vectors=[ - (str(i+d), embedding_values(2)) for d in range(50) - ], - namespace=target_namespace - ) - + idx.upsert(vectors=[(str(i + d), embedding_values(2)) for d in range(50)], namespace=target_namespace) + if wait: - poll_fetch_for_ids_in_namespace(idx, ids=['999'], namespace=target_namespace) + 
poll_fetch_for_ids_in_namespace(idx, ids=["999"], namespace=target_namespace) diff --git a/tests/integration/data/test_fetch.py b/tests/integration/data/test_fetch.py index 7151be55..a1646f38 100644 --- a/tests/integration/data/test_fetch.py +++ b/tests/integration/data/test_fetch.py @@ -1,84 +1,78 @@ import pytest from pinecone import PineconeException, FetchResponse + class TestFetch: def setup_method(self): self.expected_dimension = 2 - @pytest.mark.parametrize('use_nondefault_namespace', [True, False]) - def test_fetch_multiple_by_id( - self, - idx, - namespace, - use_nondefault_namespace - ): - target_namespace = namespace if use_nondefault_namespace else '' + @pytest.mark.parametrize("use_nondefault_namespace", [True, False]) + def test_fetch_multiple_by_id(self, idx, namespace, use_nondefault_namespace): + target_namespace = namespace if use_nondefault_namespace else "" - results = idx.fetch(ids=['1', '2', '4'], namespace=target_namespace) + results = idx.fetch(ids=["1", "2", "4"], namespace=target_namespace) assert isinstance(results, FetchResponse) == True assert results.usage != None - assert results.usage['read_units'] != None - assert results.usage['read_units'] > 0 + assert results.usage["read_units"] != None + assert results.usage["read_units"] > 0 assert results.namespace == target_namespace assert len(results.vectors) == 3 - assert results.vectors['1'].id == '1' - assert results.vectors['2'].id == '2' + assert results.vectors["1"].id == "1" + assert results.vectors["2"].id == "2" # Metadata included, if set - assert results.vectors['1'].metadata == None - assert results.vectors['2'].metadata == None - assert results.vectors['4'].metadata != None - assert results.vectors['4'].metadata['genre'] == 'action' - assert results.vectors['4'].metadata['runtime'] == 120 + assert results.vectors["1"].metadata == None + assert results.vectors["2"].metadata == None + assert results.vectors["4"].metadata != None + assert results.vectors["4"].metadata["genre"] == 
"action" + assert results.vectors["4"].metadata["runtime"] == 120 # Values included - assert results.vectors['1'].values != None - assert len(results.vectors['1'].values) == self.expected_dimension - + assert results.vectors["1"].values != None + assert len(results.vectors["1"].values) == self.expected_dimension - @pytest.mark.parametrize('use_nondefault_namespace', [True, False]) + @pytest.mark.parametrize("use_nondefault_namespace", [True, False]) def test_fetch_single_by_id(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" - results = idx.fetch(ids=['1'], namespace=target_namespace) + results = idx.fetch(ids=["1"], namespace=target_namespace) assert results.namespace == target_namespace assert len(results.vectors) == 1 - assert results.vectors['1'].id == '1' - assert results.vectors['1'].metadata == None - assert results.vectors['1'].values != None - assert len(results.vectors['1'].values) == self.expected_dimension + assert results.vectors["1"].id == "1" + assert results.vectors["1"].metadata == None + assert results.vectors["1"].values != None + assert len(results.vectors["1"].values) == self.expected_dimension - @pytest.mark.parametrize('use_nondefault_namespace', [True, False]) + @pytest.mark.parametrize("use_nondefault_namespace", [True, False]) def test_fetch_nonexistent_id(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" # Fetch id that is missing - results = idx.fetch(ids=['100'], namespace=target_namespace) + results = idx.fetch(ids=["100"], namespace=target_namespace) assert results.namespace == target_namespace assert len(results.vectors) == 0 def test_fetch_nonexistent_namespace(self, idx): - target_namespace = 'nonexistent-namespace' + target_namespace = "nonexistent-namespace" # Fetch 
from namespace with no vectors - results = idx.fetch(ids=['1'], namespace=target_namespace) + results = idx.fetch(ids=["1"], namespace=target_namespace) assert results.namespace == target_namespace assert len(results.vectors) == 0 - @pytest.mark.parametrize('use_nondefault_namespace', [True, False]) + @pytest.mark.parametrize("use_nondefault_namespace", [True, False]) def test_fetch_with_empty_list_of_ids(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" # Fetch with empty list of ids with pytest.raises(PineconeException) as e: idx.fetch(ids=[], namespace=target_namespace) - assert 'ids' in str(e.value).lower() + assert "ids" in str(e.value).lower() def test_fetch_unspecified_namespace(self, idx): # Fetch without specifying namespace gives default namespace results - results = idx.fetch(ids=['1', '4']) - assert results.namespace == '' - assert results.vectors['1'].id == '1' - assert results.vectors['1'].values != None - assert results.vectors['4'].metadata != None - + results = idx.fetch(ids=["1", "4"]) + assert results.namespace == "" + assert results.vectors["1"].id == "1" + assert results.vectors["1"].values != None + assert results.vectors["4"].metadata != None diff --git a/tests/integration/data/test_initialization.py b/tests/integration/data/test_initialization.py index 06273f0a..e73577af 100644 --- a/tests/integration/data/test_initialization.py +++ b/tests/integration/data/test_initialization.py @@ -1,38 +1,39 @@ import pytest + class TestIndexClientInitialization: def test_index_direct_host_kwarg(self, client, index_host): index = client.Index(host=index_host) - index.fetch(ids=['1', '2', '3']) + index.fetch(ids=["1", "2", "3"]) def test_index_direct_host_with_https(self, client, index_host): - if not index_host.startswith('https://'): - index_host = 'https://' + index_host + if not index_host.startswith("https://"): + 
index_host = "https://" + index_host index = client.Index(host=index_host) - index.fetch(ids=['1', '2', '3']) + index.fetch(ids=["1", "2", "3"]) def test_index_direct_host_without_https(self, client, index_host): - if index_host.startswith('https://'): + if index_host.startswith("https://"): index_host = index_host[8:] index = client.Index(host=index_host) - index.fetch(ids=['1', '2', '3']) + index.fetch(ids=["1", "2", "3"]) def test_index_by_name_positional_only(self, client, index_name, index_host): index = client.Index(index_name) - index.fetch(ids=['1', '2', '3']) + index.fetch(ids=["1", "2", "3"]) def test_index_by_name_positional_with_host(self, client, index_name, index_host): index = client.Index(index_name, index_host) - index.fetch(ids=['1', '2', '3']) + index.fetch(ids=["1", "2", "3"]) def test_index_by_name_kwargs(self, client, index_name): index = client.Index(name=index_name) - index.fetch(ids=['1', '2', '3']) + index.fetch(ids=["1", "2", "3"]) def test_index_by_name_kwargs_with_host(self, client, index_name, index_host): index = client.Index(name=index_name, host=index_host) - index.fetch(ids=['1', '2', '3']) + index.fetch(ids=["1", "2", "3"]) def test_raises_when_no_name_or_host(self, client, index_host): with pytest.raises(ValueError): - client.Index() \ No newline at end of file + client.Index() diff --git a/tests/integration/data/test_list.py b/tests/integration/data/test_list.py index 5a8d4c18..578afe73 100644 --- a/tests/integration/data/test_list.py +++ b/tests/integration/data/test_list.py @@ -1,52 +1,52 @@ from pinecone import Vector + class TestListPaginated: def test_list_when_no_results(self, idx): - results = idx.list_paginated(namespace='no-results') + results = idx.list_paginated(namespace="no-results") assert results != None - assert results.namespace == 'no-results' + assert results.namespace == "no-results" assert len(results.vectors) == 0 # assert results.pagination == None def test_list_no_args(self, idx): results = 
idx.list_paginated() - + assert results != None assert len(results.vectors) == 9 - assert results.namespace == '' + assert results.namespace == "" # assert results.pagination == None - + def test_list_when_limit(self, idx, list_namespace): results = idx.list_paginated(limit=10, namespace=list_namespace) - + assert results != None assert len(results.vectors) == 10 assert results.namespace == list_namespace assert results.pagination != None assert results.pagination.next != None assert isinstance(results.pagination.next, str) - assert results.pagination.next != '' + assert results.pagination.next != "" def test_list_when_using_pagination(self, idx, list_namespace): - results = idx.list_paginated( - prefix='99', limit=5, namespace=list_namespace - ) + results = idx.list_paginated(prefix="99", limit=5, namespace=list_namespace) next_results = idx.list_paginated( - prefix='99', limit=5, namespace=list_namespace, pagination_token=results.pagination.next + prefix="99", limit=5, namespace=list_namespace, pagination_token=results.pagination.next ) next_next_results = idx.list_paginated( - prefix='99', limit=5, namespace=list_namespace, pagination_token=next_results.pagination.next + prefix="99", limit=5, namespace=list_namespace, pagination_token=next_results.pagination.next ) assert results.namespace == list_namespace assert len(results.vectors) == 5 - assert [v.id for v in results.vectors] == ['99', '990', '991', '992', '993'] + assert [v.id for v in results.vectors] == ["99", "990", "991", "992", "993"] assert len(next_results.vectors) == 5 - assert [v.id for v in next_results.vectors] == ['994', '995', '996', '997', '998'] + assert [v.id for v in next_results.vectors] == ["994", "995", "996", "997", "998"] assert len(next_next_results.vectors) == 1 - assert [v.id for v in next_next_results.vectors] == ['999'] + assert [v.id for v in next_next_results.vectors] == ["999"] # assert next_next_results.pagination == None + class TestList: def test_list_with_defaults(self, 
idx): pages = [] @@ -62,25 +62,25 @@ def test_list_with_defaults(self, idx): assert page_sizes == [9] def test_list(self, idx, list_namespace): - results = idx.list(prefix='99', limit=20, namespace=list_namespace) + results = idx.list(prefix="99", limit=20, namespace=list_namespace) page_count = 0 for ids in results: page_count += 1 assert ids != None assert len(ids) == 11 - assert ids == ['99', '990', '991', '992', '993', '994', '995', '996', '997', '998', '999'] + assert ids == ["99", "990", "991", "992", "993", "994", "995", "996", "997", "998", "999"] assert page_count == 1 def test_list_when_no_results_for_prefix(self, idx, list_namespace): page_count = 0 - for ids in idx.list(prefix='no-results', namespace=list_namespace): + for ids in idx.list(prefix="no-results", namespace=list_namespace): page_count += 1 assert page_count == 0 def test_list_when_no_results_for_namespace(self, idx): page_count = 0 - for ids in idx.list(prefix='99', namespace='no-results'): + for ids in idx.list(prefix="99", namespace="no-results"): page_count += 1 assert page_count == 0 @@ -89,7 +89,7 @@ def test_list_when_multiple_pages(self, idx, list_namespace): page_sizes = [] page_count = 0 - for ids in idx.list(prefix='99', limit=5, namespace=list_namespace): + for ids in idx.list(prefix="99", limit=5, namespace=list_namespace): page_count += 1 assert ids != None page_sizes.append(len(ids)) @@ -97,17 +97,19 @@ def test_list_when_multiple_pages(self, idx, list_namespace): assert page_count == 3 assert page_sizes == [5, 5, 1] - assert pages[0] == ['99', '990', '991', '992', '993'] - assert pages[1] == ['994', '995', '996', '997', '998'] - assert pages[2] == ['999'] + assert pages[0] == ["99", "990", "991", "992", "993"] + assert pages[1] == ["994", "995", "996", "997", "998"] + assert pages[2] == ["999"] def test_list_then_fetch(self, idx, list_namespace): vectors = [] - for ids in idx.list(prefix='99', limit=5, namespace=list_namespace): + for ids in idx.list(prefix="99", limit=5, 
namespace=list_namespace): result = idx.fetch(ids=ids, namespace=list_namespace) vectors.extend([v for _, v in result.vectors.items()]) assert len(vectors) == 11 assert isinstance(vectors[0], Vector) - assert set([v.id for v in vectors]) == set(['99', '990', '991', '992', '993', '994', '995', '996', '997', '998', '999']) \ No newline at end of file + assert set([v.id for v in vectors]) == set( + ["99", "990", "991", "992", "993", "994", "995", "996", "997", "998", "999"] + ) diff --git a/tests/integration/data/test_list_errors.py b/tests/integration/data/test_list_errors.py index 4f69752c..f0e9e1bf 100644 --- a/tests/integration/data/test_list_errors.py +++ b/tests/integration/data/test_list_errors.py @@ -1,16 +1,17 @@ from pinecone import PineconeException import pytest + class TestListErrors: def test_list_change_prefix_while_fetching_next_page(self, idx, list_namespace): - results = idx.list_paginated(prefix='99', limit=5, namespace=list_namespace) + results = idx.list_paginated(prefix="99", limit=5, namespace=list_namespace) with pytest.raises(PineconeException) as e: - idx.list_paginated(prefix='98', limit=5, pagination_token=results.pagination.next) - assert 'prefix' in str(e.value) + idx.list_paginated(prefix="98", limit=5, pagination_token=results.pagination.next) + assert "prefix" in str(e.value) - @pytest.mark.skip(reason='Bug filed') + @pytest.mark.skip(reason="Bug filed") def test_list_change_namespace_while_fetching_next_page(self, idx, namespace): results = idx.list_paginated(limit=5, namespace=namespace) with pytest.raises(PineconeException) as e: - idx.list_paginated(limit=5, namespace='new-namespace', pagination_token=results.pagination.next) - assert 'namespace' in str(e.value) \ No newline at end of file + idx.list_paginated(limit=5, namespace="new-namespace", pagination_token=results.pagination.next) + assert "namespace" in str(e.value) diff --git a/tests/integration/data/test_openapi_configuration.py 
b/tests/integration/data/test_openapi_configuration.py index e8b93389..26c12a03 100644 --- a/tests/integration/data/test_openapi_configuration.py +++ b/tests/integration/data/test_openapi_configuration.py @@ -5,14 +5,15 @@ from pinecone.core.client.configuration import Configuration as OpenApiConfiguration from urllib3 import make_headers -@pytest.mark.skipif(os.getenv('USE_GRPC') != 'false', reason='Only test when using REST') + +@pytest.mark.skipif(os.getenv("USE_GRPC") != "false", reason="Only test when using REST") class TestIndexOpenapiConfig: def test_passing_openapi_config(self, api_key_fixture, index_host): oai_config = OpenApiConfiguration.get_default_copy() p = Pinecone(api_key=api_key_fixture, openapi_config=oai_config) assert p.config.api_key == api_key_fixture - p.list_indexes() # should not throw - + p.list_indexes() # should not throw + index = p.Index(host=index_host) assert index._config.api_key == api_key_fixture - index.describe_index_stats() \ No newline at end of file + index.describe_index_stats() diff --git a/tests/integration/data/test_query.py b/tests/integration/data/test_query.py index e1c7ba88..960ad920 100644 --- a/tests/integration/data/test_query.py +++ b/tests/integration/data/test_query.py @@ -2,40 +2,42 @@ from pinecone import QueryResponse from .utils import embedding_values + def find_by_id(matches, id): with_id = [match for match in matches if match.id == id] return with_id[0] if len(with_id) > 0 else None -@pytest.mark.parametrize('use_nondefault_namespace', [True, False]) -class TestQuery: + +@pytest.mark.parametrize("use_nondefault_namespace", [True, False]) +class TestQuery: def setup_method(self): self.expected_dimension = 2 def test_query_by_id(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" - results = idx.query(id='1', namespace=target_namespace, top_k=10) + results = idx.query(id="1", 
namespace=target_namespace, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace - + assert results.usage != None - assert results.usage['read_units'] != None - assert results.usage['read_units'] > 0 + assert results.usage["read_units"] != None + assert results.usage["read_units"] > 0 # By default, does not include values or metadata - record_with_metadata = find_by_id(results.matches, '4') + record_with_metadata = find_by_id(results.matches, "4") assert record_with_metadata.metadata == None assert record_with_metadata.values == [] def test_query_by_vector(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" results = idx.query(vector=embedding_values(2), namespace=target_namespace, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace def test_query_by_vector_include_values(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" results = idx.query(vector=embedding_values(2), namespace=target_namespace, include_values=True, top_k=10) assert isinstance(results, QueryResponse) == True @@ -45,7 +47,7 @@ def test_query_by_vector_include_values(self, idx, namespace, use_nondefault_nam assert len(results.matches[0].values) == self.expected_dimension def test_query_by_vector_include_metadata(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" results = idx.query(vector=embedding_values(2), namespace=target_namespace, include_metadata=True, top_k=10) assert isinstance(results, QueryResponse) == True @@ -53,138 +55,142 @@ def test_query_by_vector_include_metadata(self, idx, namespace, 
use_nondefault_n matches_with_metadata = [match for match in results.matches if match.metadata != None] assert len(matches_with_metadata) == 3 - assert find_by_id(results.matches, '4').metadata['genre'] == 'action' + assert find_by_id(results.matches, "4").metadata["genre"] == "action" def test_query_by_vector_include_values_and_metadata(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" - results = idx.query(vector=embedding_values(2), namespace=target_namespace, include_values=True, include_metadata=True, top_k=10) + results = idx.query( + vector=embedding_values(2), namespace=target_namespace, include_values=True, include_metadata=True, top_k=10 + ) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace matches_with_metadata = [match for match in results.matches if match.metadata != None] assert len(matches_with_metadata) == 3 - assert find_by_id(results.matches, '4').metadata['genre'] == 'action' + assert find_by_id(results.matches, "4").metadata["genre"] == "action" assert len(results.matches[0].values) == self.expected_dimension -class TestQueryEdgeCases(): + +class TestQueryEdgeCases: def test_query_in_empty_namespace(self, idx): - results = idx.query(id='1', namespace='empty', top_k=10) + results = idx.query(id="1", namespace="empty", top_k=10) assert isinstance(results, QueryResponse) == True - assert results.namespace == 'empty' + assert results.namespace == "empty" assert len(results.matches) == 0 -@pytest.mark.parametrize('use_nondefault_namespace', [True, False]) -class TestQueryWithFilter(): + +@pytest.mark.parametrize("use_nondefault_namespace", [True, False]) +class TestQueryWithFilter: def test_query_by_id_with_filter(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if 
use_nondefault_namespace else "" - results = idx.query(id='1', namespace=target_namespace, filter={'genre': 'action'}, top_k=10) + results = idx.query(id="1", namespace=target_namespace, filter={"genre": "action"}, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace assert len(results.matches) == 1 - assert results.matches[0].id == '4' + assert results.matches[0].id == "4" def test_query_by_id_with_filter_gt(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" # Vector(id='4', values=embedding_values(2), metadata={'genre': 'action', 'runtime': 120 }), # Vector(id='5', values=embedding_values(2), metadata={'genre': 'comedy', 'runtime': 90 }), # Vector(id='6', values=embedding_values(2), metadata={'genre': 'romance', 'runtime': 240 }) - results = idx.query(id='1', namespace=target_namespace, filter={'runtime': {'$gt': 100}}, top_k=10) + results = idx.query(id="1", namespace=target_namespace, filter={"runtime": {"$gt": 100}}, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace assert len(results.matches) == 2 - assert find_by_id(results.matches, '4') != None - assert find_by_id(results.matches, '6') != None + assert find_by_id(results.matches, "4") != None + assert find_by_id(results.matches, "6") != None def test_query_by_id_with_filter_gte(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" # Vector(id='4', values=embedding_values(2), metadata={'genre': 'action', 'runtime': 120 }), # Vector(id='5', values=embedding_values(2), metadata={'genre': 'comedy', 'runtime': 90 }), # Vector(id='6', values=embedding_values(2), metadata={'genre': 'romance', 'runtime': 240 }) - results = idx.query(id='1', 
namespace=target_namespace, filter={'runtime': {'$gte': 90}}, top_k=10) + results = idx.query(id="1", namespace=target_namespace, filter={"runtime": {"$gte": 90}}, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace assert len(results.matches) == 3 - assert find_by_id(results.matches, '4') != None - assert find_by_id(results.matches, '5') != None - assert find_by_id(results.matches, '6') != None + assert find_by_id(results.matches, "4") != None + assert find_by_id(results.matches, "5") != None + assert find_by_id(results.matches, "6") != None def test_query_by_id_with_filter_lt(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" # Vector(id='4', values=embedding_values(2), metadata={'genre': 'action', 'runtime': 120 }), # Vector(id='5', values=embedding_values(2), metadata={'genre': 'comedy', 'runtime': 90 }), # Vector(id='6', values=embedding_values(2), metadata={'genre': 'romance', 'runtime': 240 }) - results = idx.query(id='1', namespace=target_namespace, filter={'runtime': {'$lt': 100}}, top_k=10) + results = idx.query(id="1", namespace=target_namespace, filter={"runtime": {"$lt": 100}}, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace assert len(results.matches) == 1 - assert find_by_id(results.matches, '5') != None + assert find_by_id(results.matches, "5") != None def test_query_by_id_with_filter_lte(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" # Vector(id='4', values=embedding_values(2), metadata={'genre': 'action', 'runtime': 120 }), # Vector(id='5', values=embedding_values(2), metadata={'genre': 'comedy', 'runtime': 90 }), # Vector(id='6', values=embedding_values(2), metadata={'genre': 
'romance', 'runtime': 240 }) - results = idx.query(id='1', namespace=target_namespace, filter={'runtime': {'$lte': 120}}, top_k=10) + results = idx.query(id="1", namespace=target_namespace, filter={"runtime": {"$lte": 120}}, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace assert len(results.matches) == 2 - assert find_by_id(results.matches, '4') != None - assert find_by_id(results.matches, '5') != None + assert find_by_id(results.matches, "4") != None + assert find_by_id(results.matches, "5") != None def test_query_by_id_with_filter_in(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" # Vector(id='4', values=embedding_values(2), metadata={'genre': 'action', 'runtime': 120 }), # Vector(id='5', values=embedding_values(2), metadata={'genre': 'comedy', 'runtime': 90 }), # Vector(id='6', values=embedding_values(2), metadata={'genre': 'romance', 'runtime': 240 }) - results = idx.query(id='1', namespace=target_namespace, filter={'genre': {'$in': ['romance']}}, top_k=10) + results = idx.query(id="1", namespace=target_namespace, filter={"genre": {"$in": ["romance"]}}, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace assert len(results.matches) == 1 - assert find_by_id(results.matches, '6') != None + assert find_by_id(results.matches, "6") != None - @pytest.mark.skip(reason='Seems like a bug in the server') + @pytest.mark.skip(reason="Seems like a bug in the server") def test_query_by_id_with_filter_nin(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" # Vector(id='4', values=embedding_values(2), metadata={'genre': 'action', 'runtime': 120 }), # Vector(id='5', values=embedding_values(2), 
metadata={'genre': 'comedy', 'runtime': 90 }), # Vector(id='6', values=embedding_values(2), metadata={'genre': 'romance', 'runtime': 240 }) - results = idx.query(id='1', namespace=target_namespace, filter={'genre': {'$nin': ['romance']}}, top_k=10) + results = idx.query(id="1", namespace=target_namespace, filter={"genre": {"$nin": ["romance"]}}, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace assert len(results.matches) == 2 - assert find_by_id(results.matches, '4') != None - assert find_by_id(results.matches, '5') != None + assert find_by_id(results.matches, "4") != None + assert find_by_id(results.matches, "5") != None def test_query_by_id_with_filter_eq(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" # Vector(id='4', values=embedding_values(2), metadata={'genre': 'action', 'runtime': 120 }), # Vector(id='5', values=embedding_values(2), metadata={'genre': 'comedy', 'runtime': 90 }), # Vector(id='6', values=embedding_values(2), metadata={'genre': 'romance', 'runtime': 240 }) - results = idx.query(id='1', namespace=target_namespace, filter={'genre': {'$eq': 'action'}}, top_k=10) + results = idx.query(id="1", namespace=target_namespace, filter={"genre": {"$eq": "action"}}, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace assert len(results.matches) == 1 - assert find_by_id(results.matches, '4') != None + assert find_by_id(results.matches, "4") != None - @pytest.mark.skip(reason='Seems like a bug in the server') + @pytest.mark.skip(reason="Seems like a bug in the server") def test_query_by_id_with_filter_ne(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" # Vector(id='4', 
values=embedding_values(2), metadata={'genre': 'action', 'runtime': 120 }), # Vector(id='5', values=embedding_values(2), metadata={'genre': 'comedy', 'runtime': 90 }), # Vector(id='6', values=embedding_values(2), metadata={'genre': 'romance', 'runtime': 240 }) - results = idx.query(id='1', namespace=target_namespace, filter={'genre': {'$ne': 'action'}}, top_k=10) + results = idx.query(id="1", namespace=target_namespace, filter={"genre": {"$ne": "action"}}, top_k=10) assert isinstance(results, QueryResponse) == True assert results.namespace == target_namespace assert len(results.matches) == 2 - assert find_by_id(results.matches, '5') != None - assert find_by_id(results.matches, '6') != None + assert find_by_id(results.matches, "5") != None + assert find_by_id(results.matches, "6") != None diff --git a/tests/integration/data/test_query_errors.py b/tests/integration/data/test_query_errors.py index 2a9aea59..293f1e78 100644 --- a/tests/integration/data/test_query_errors.py +++ b/tests/integration/data/test_query_errors.py @@ -1,33 +1,33 @@ import pytest from pinecone import PineconeException -@pytest.mark.parametrize('use_nondefault_namespace', [True, False]) + +@pytest.mark.parametrize("use_nondefault_namespace", [True, False]) class TestQueryErrorCases: def test_query_with_invalid_vector(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" with pytest.raises(PineconeException) as e: idx.query(vector=[1, 2, 3], namespace=target_namespace, top_k=10) - - assert 'vector' in str(e.value).lower() + + assert "vector" in str(e.value).lower() def test_query_with_invalid_id(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" with pytest.raises(TypeError) as e: idx.query(id=1, namespace=target_namespace, top_k=10) def 
test_query_with_invalid_top_k(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" with pytest.raises((PineconeException, ValueError)) as e: - idx.query(id='1', namespace=target_namespace, top_k=-1) + idx.query(id="1", namespace=target_namespace, top_k=-1) def test_query_with_missing_top_k(self, idx, namespace, use_nondefault_namespace): - target_namespace = namespace if use_nondefault_namespace else '' + target_namespace = namespace if use_nondefault_namespace else "" with pytest.raises((TypeError, PineconeException)) as e: - idx.query(id='1', namespace=target_namespace) + idx.query(id="1", namespace=target_namespace) - assert 'top_k' in str(e.value).lower() - + assert "top_k" in str(e.value).lower() diff --git a/tests/integration/data/test_upsert.py b/tests/integration/data/test_upsert.py index 5105c9cb..ae2975ff 100644 --- a/tests/integration/data/test_upsert.py +++ b/tests/integration/data/test_upsert.py @@ -4,39 +4,35 @@ from ..helpers import poll_stats_for_namespace from .utils import embedding_values -@pytest.mark.parametrize('use_nondefault_namespace', [True, False]) -def test_upsert_to_namespace( - idx, - namespace, - use_nondefault_namespace -): - target_namespace = namespace if use_nondefault_namespace else '' - + +@pytest.mark.parametrize("use_nondefault_namespace", [True, False]) +def test_upsert_to_namespace(idx, namespace, use_nondefault_namespace): + target_namespace = namespace if use_nondefault_namespace else "" + # Upsert with tuples - idx.upsert(vectors=[ - ('1', embedding_values()), - ('2', embedding_values()), - ('3', embedding_values()) - ], - namespace=target_namespace + idx.upsert( + vectors=[("1", embedding_values()), ("2", embedding_values()), ("3", embedding_values())], + namespace=target_namespace, ) # Upsert with objects - idx.upsert(vectors=[ - Vector(id='4', values=embedding_values()), - Vector(id='5', 
values=embedding_values()), - Vector(id='6', values=embedding_values()) - ], - namespace=target_namespace + idx.upsert( + vectors=[ + Vector(id="4", values=embedding_values()), + Vector(id="5", values=embedding_values()), + Vector(id="6", values=embedding_values()), + ], + namespace=target_namespace, ) # Upsert with dict - idx.upsert(vectors=[ - {'id': '7', 'values': embedding_values()}, - {'id': '8', 'values': embedding_values()}, - {'id': '9', 'values': embedding_values()} - ], - namespace=target_namespace + idx.upsert( + vectors=[ + {"id": "7", "values": embedding_values()}, + {"id": "8", "values": embedding_values()}, + {"id": "9", "values": embedding_values()}, + ], + namespace=target_namespace, ) poll_stats_for_namespace(idx, target_namespace, 9) @@ -46,35 +42,37 @@ def test_upsert_to_namespace( assert stats.total_vector_count >= 9 assert stats.namespaces[target_namespace].vector_count == 9 -@pytest.mark.parametrize('use_nondefault_namespace', [True, False]) -@pytest.mark.skipif(os.getenv('METRIC') != 'dotproduct', reason='Only metric=dotprodouct indexes support hybrid') -def test_upsert_to_namespace_with_sparse_embedding_values( - idx, - namespace, - use_nondefault_namespace -): - target_namespace = namespace if use_nondefault_namespace else '' + +@pytest.mark.parametrize("use_nondefault_namespace", [True, False]) +@pytest.mark.skipif(os.getenv("METRIC") != "dotproduct", reason="Only metric=dotprodouct indexes support hybrid") +def test_upsert_to_namespace_with_sparse_embedding_values(idx, namespace, use_nondefault_namespace): + target_namespace = namespace if use_nondefault_namespace else "" # Upsert with sparse values object - idx.upsert(vectors=[ + idx.upsert( + vectors=[ Vector( - id='1', - values=embedding_values(), - sparse_values=SparseValues( - indices=[0,1], - values=embedding_values() - ) + id="1", values=embedding_values(), sparse_values=SparseValues(indices=[0, 1], values=embedding_values()) ), ], - namespace=target_namespace + 
namespace=target_namespace, ) # Upsert with sparse values dict - idx.upsert(vectors=[ - {'id': '2', 'values': embedding_values(),'sparse_values': {'indices': [0,1], 'values': embedding_values()}}, - {'id': '3', 'values': embedding_values(), 'sparse_values': {'indices': [0,1], 'values': embedding_values()}} + idx.upsert( + vectors=[ + { + "id": "2", + "values": embedding_values(), + "sparse_values": {"indices": [0, 1], "values": embedding_values()}, + }, + { + "id": "3", + "values": embedding_values(), + "sparse_values": {"indices": [0, 1], "values": embedding_values()}, + }, ], - namespace=target_namespace + namespace=target_namespace, ) poll_stats_for_namespace(idx, target_namespace, 9) @@ -83,4 +81,3 @@ def test_upsert_to_namespace_with_sparse_embedding_values( stats = idx.describe_index_stats() assert stats.total_vector_count >= 9 assert stats.namespaces[target_namespace].vector_count == 9 - diff --git a/tests/integration/data/test_upsert_errors.py b/tests/integration/data/test_upsert_errors.py index 1a360b63..de90d4b6 100644 --- a/tests/integration/data/test_upsert_errors.py +++ b/tests/integration/data/test_upsert_errors.py @@ -5,186 +5,144 @@ from ..helpers import fake_api_key from pinecone import PineconeException -class TestUpsertApiKeyMissing(): + +class TestUpsertApiKeyMissing: def test_upsert_fails_when_api_key_invalid(self, index_name, index_host): with pytest.raises(PineconeException): from pinecone import Pinecone - pc = Pinecone( - api_key=fake_api_key(), - additional_headers={'sdk-test-suite': 'pinecone-python-client'} - ) + + pc = Pinecone(api_key=fake_api_key(), additional_headers={"sdk-test-suite": "pinecone-python-client"}) idx = pc.Index(name=index_name, host=index_host) - idx.upsert(vectors=[ - Vector(id='1', values=embedding_values()), - Vector(id='2', values=embedding_values()) - ] - ) - @pytest.mark.skipif(os.getenv('USE_GRPC') != 'true', reason='Only test grpc client when grpc extras') + idx.upsert(vectors=[Vector(id="1", 
values=embedding_values()), Vector(id="2", values=embedding_values())]) + + @pytest.mark.skipif(os.getenv("USE_GRPC") != "true", reason="Only test grpc client when grpc extras") def test_upsert_fails_when_api_key_invalid_grpc(self, index_name, index_host): with pytest.raises(PineconeException): from pinecone.grpc import PineconeGRPC + pc = PineconeGRPC(api_key=fake_api_key()) idx = pc.Index(name=index_name, host=index_host) - idx.upsert(vectors=[ - Vector(id='1', values=embedding_values()), - Vector(id='2', values=embedding_values()) - ] - ) + idx.upsert(vectors=[Vector(id="1", values=embedding_values()), Vector(id="2", values=embedding_values())]) -class TestUpsertFailsWhenDimensionMismatch(): + +class TestUpsertFailsWhenDimensionMismatch: def test_upsert_fails_when_dimension_mismatch_objects(self, idx): with pytest.raises(PineconeException): - idx.upsert(vectors=[ - Vector(id='1', values=embedding_values(2)), - Vector(id='2', values=embedding_values(3)) - ]) - + idx.upsert(vectors=[Vector(id="1", values=embedding_values(2)), Vector(id="2", values=embedding_values(3))]) + def test_upsert_fails_when_dimension_mismatch_tuples(self, idx): with pytest.raises(PineconeException): - idx.upsert(vectors=[ - ('1', embedding_values(2)), - ('2', embedding_values(3)) - ]) - + idx.upsert(vectors=[("1", embedding_values(2)), ("2", embedding_values(3))]) + def test_upsert_fails_when_dimension_mismatch_dicts(self, idx): with pytest.raises(PineconeException): - idx.upsert(vectors=[ - {'id': '1', 'values': embedding_values(2)}, - {'id': '2', 'values': embedding_values(3)} - ]) + idx.upsert(vectors=[{"id": "1", "values": embedding_values(2)}, {"id": "2", "values": embedding_values(3)}]) -@pytest.mark.skipif(os.getenv('METRIC') != 'dotproduct', reason='Only metric=dotprodouct indexes support hybrid') -class TestUpsertFailsSparseValuesDimensionMismatch(): + +@pytest.mark.skipif(os.getenv("METRIC") != "dotproduct", reason="Only metric=dotprodouct indexes support hybrid") +class 
TestUpsertFailsSparseValuesDimensionMismatch: def test_upsert_fails_when_sparse_values_indices_values_mismatch_objects(self, idx): with pytest.raises(PineconeException): - idx.upsert(vectors=[ - Vector(id='1', values=[0.1, 0.1], sparse_values=SparseValues(indices=[0], values=[0.5, 0.5])) - ]) + idx.upsert( + vectors=[Vector(id="1", values=[0.1, 0.1], sparse_values=SparseValues(indices=[0], values=[0.5, 0.5]))] + ) with pytest.raises(PineconeException): - idx.upsert(vectors=[ - Vector(id='1', values=[0.1, 0.1], sparse_values=SparseValues(indices=[0, 1], values=[0.5])) - ]) - + idx.upsert( + vectors=[Vector(id="1", values=[0.1, 0.1], sparse_values=SparseValues(indices=[0, 1], values=[0.5]))] + ) + def test_upsert_fails_when_sparse_values_in_tuples(self, idx): with pytest.raises(ValueError): - idx.upsert(vectors=[ - ('1', SparseValues(indices=[0], values=[0.5])), - ('2', SparseValues(indices=[0, 1, 2], values=[0.5, 0.5, 0.5])) - ]) - + idx.upsert( + vectors=[ + ("1", SparseValues(indices=[0], values=[0.5])), + ("2", SparseValues(indices=[0, 1, 2], values=[0.5, 0.5, 0.5])), + ] + ) + def test_upsert_fails_when_sparse_values_indices_values_mismatch_dicts(self, idx): with pytest.raises(PineconeException): - idx.upsert(vectors=[ - {'id': '1', 'values': [0.2, 0.2], 'sparse_values': SparseValues(indices=[0], values=[0.5, 0.5])} - ]) + idx.upsert( + vectors=[ + {"id": "1", "values": [0.2, 0.2], "sparse_values": SparseValues(indices=[0], values=[0.5, 0.5])} + ] + ) with pytest.raises(PineconeException): - idx.upsert(vectors=[ - {'id': '1', 'values': [0.1, 0.2], 'sparse_values': SparseValues(indices=[0, 1], values=[0.5])} - ]) + idx.upsert( + vectors=[{"id": "1", "values": [0.1, 0.2], "sparse_values": SparseValues(indices=[0, 1], values=[0.5])}] + ) + -class TestUpsertFailsWhenValuesMissing(): +class TestUpsertFailsWhenValuesMissing: def test_upsert_fails_when_values_missing_objects(self, idx): with pytest.raises(TypeError): - idx.upsert(vectors=[ - Vector(id='1'), - 
Vector(id='2') - ]) - + idx.upsert(vectors=[Vector(id="1"), Vector(id="2")]) + def test_upsert_fails_when_values_missing_tuples(self, idx): with pytest.raises(ValueError): - idx.upsert(vectors=[ - ('1',), - ('2',) - ]) - + idx.upsert(vectors=[("1",), ("2",)]) + def test_upsert_fails_when_values_missing_dicts(self, idx): with pytest.raises(ValueError): - idx.upsert(vectors=[ - {'id': '1'}, - {'id': '2'} - ]) - -class TestUpsertFailsWhenValuesWrongType(): + idx.upsert(vectors=[{"id": "1"}, {"id": "2"}]) + + +class TestUpsertFailsWhenValuesWrongType: def test_upsert_fails_when_values_wrong_type_objects(self, idx): with pytest.raises(PineconeException): - idx.upsert(vectors=[ - Vector(id='1', values='abc'), - Vector(id='2', values='def') - ]) - + idx.upsert(vectors=[Vector(id="1", values="abc"), Vector(id="2", values="def")]) + def test_upsert_fails_when_values_wrong_type_tuples(self, idx): - if os.environ.get('USE_GRPC', 'false') == 'true': + if os.environ.get("USE_GRPC", "false") == "true": expected_exception = TypeError else: expected_exception = PineconeException with pytest.raises(expected_exception): - idx.upsert(vectors=[ - ('1', 'abc'), - ('2', 'def') - ]) - + idx.upsert(vectors=[("1", "abc"), ("2", "def")]) + def test_upsert_fails_when_values_wrong_type_dicts(self, idx): with pytest.raises(TypeError): - idx.upsert(vectors=[ - {'id': '1', 'values': 'abc'}, - {'id': '2', 'values': 'def'} - ]) + idx.upsert(vectors=[{"id": "1", "values": "abc"}, {"id": "2", "values": "def"}]) -class TestUpsertFailsWhenVectorsMissing(): + +class TestUpsertFailsWhenVectorsMissing: def test_upsert_fails_when_vectors_empty(self, idx): with pytest.raises(PineconeException): idx.upsert(vectors=[]) def test_upsert_fails_when_vectors_wrong_type(self, idx): with pytest.raises(ValueError): - idx.upsert(vectors='abc') + idx.upsert(vectors="abc") def test_upsert_fails_when_vectors_missing(self, idx): with pytest.raises(TypeError): idx.upsert() -class TestUpsertIdMissing(): + +class 
TestUpsertIdMissing: def test_upsert_fails_when_id_is_missing_objects(self, idx): with pytest.raises(TypeError): - idx.upsert(vectors=[ - Vector(id='1', values=embedding_values()), - Vector(values=embedding_values()) - ]) - + idx.upsert(vectors=[Vector(id="1", values=embedding_values()), Vector(values=embedding_values())]) + def test_upsert_fails_when_id_is_missing_tuples(self, idx): with pytest.raises(ValueError): - idx.upsert(vectors=[ - ('1', embedding_values()), - (embedding_values()) - ]) - + idx.upsert(vectors=[("1", embedding_values()), (embedding_values())]) + def test_upsert_fails_when_id_is_missing_dicts(self, idx): with pytest.raises(ValueError): - idx.upsert(vectors=[ - {'id': '1', 'values': embedding_values()}, - {'values': embedding_values()} - ]) + idx.upsert(vectors=[{"id": "1", "values": embedding_values()}, {"values": embedding_values()}]) -class TestUpsertIdWrongType(): +class TestUpsertIdWrongType: def test_upsert_fails_when_id_wrong_type_objects(self, idx): with pytest.raises(Exception): - idx.upsert(vectors=[ - Vector(id='1', values=embedding_values()), - Vector(id=2, values=embedding_values()) - ]) - + idx.upsert(vectors=[Vector(id="1", values=embedding_values()), Vector(id=2, values=embedding_values())]) + def test_upsert_fails_when_id_wrong_type_tuples(self, idx): with pytest.raises(Exception): - idx.upsert(vectors=[ - ('1', embedding_values()), - (2, embedding_values()) - ]) - + idx.upsert(vectors=[("1", embedding_values()), (2, embedding_values())]) + def test_upsert_fails_when_id_wrong_type_dicts(self, idx): with pytest.raises(Exception): - idx.upsert(vectors=[ - {'id': '1', 'values': embedding_values()}, - {'id': 2, 'values': embedding_values()} - ]) \ No newline at end of file + idx.upsert(vectors=[{"id": "1", "values": embedding_values()}, {"id": 2, "values": embedding_values()}]) diff --git a/tests/integration/data/utils.py b/tests/integration/data/utils.py index 8023ce22..0a0905bf 100644 --- a/tests/integration/data/utils.py +++ 
b/tests/integration/data/utils.py @@ -1,5 +1,5 @@ import random + def embedding_values(dimension=2): return [random.random() for _ in range(dimension)] - diff --git a/tests/integration/helpers/__init__.py b/tests/integration/helpers/__init__.py index 6914ce1c..c7ddd18a 100644 --- a/tests/integration/helpers/__init__.py +++ b/tests/integration/helpers/__init__.py @@ -1,8 +1,8 @@ from .helpers import ( fake_api_key, - get_environment_var, + get_environment_var, random_string, generate_index_name, poll_stats_for_namespace, - poll_fetch_for_ids_in_namespace -) \ No newline at end of file + poll_fetch_for_ids_in_namespace, +) diff --git a/tests/integration/helpers/helpers.py b/tests/integration/helpers/helpers.py index e7da0173..f5b04b99 100644 --- a/tests/integration/helpers/helpers.py +++ b/tests/integration/helpers/helpers.py @@ -5,50 +5,56 @@ import string from typing import Any + def random_string(length): - return ''.join(random.choice(string.ascii_lowercase) for i in range(length)) + return "".join(random.choice(string.ascii_lowercase) for i in range(length)) + def generate_index_name(test_name: str) -> str: - buildNumber = os.getenv('GITHUB_BUILD_NUMBER', None) - - if test_name.startswith('test_'): + buildNumber = os.getenv("GITHUB_BUILD_NUMBER", None) + + if test_name.startswith("test_"): test_name = test_name[5:] # Trim name length to save space for other info in name test_name = test_name[:20] # Remove trailing underscore, if any - if test_name.endswith('_'): + if test_name.endswith("_"): test_name = test_name[:-1] name_parts = [buildNumber, test_name, random_string(45)] - index_name = '-'.join([x for x in name_parts if x is not None]) - + index_name = "-".join([x for x in name_parts if x is not None]) + # Remove invalid characters - replace_with_hyphen = re.compile(r'[\[\(_,\s]') - index_name = re.sub(replace_with_hyphen, '-', index_name) - replace_with_empty = re.compile(r'[\]\)\.]') - index_name = re.sub(replace_with_empty, '', index_name) + 
replace_with_hyphen = re.compile(r"[\[\(_,\s]") + index_name = re.sub(replace_with_hyphen, "-", index_name) + replace_with_empty = re.compile(r"[\]\)\.]") + index_name = re.sub(replace_with_empty, "", index_name) max_length = 45 index_name = index_name[:max_length] # Trim final character if it is not alphanumeric - if test_name.endswith('_') or test_name.endswith('-'): + if test_name.endswith("_") or test_name.endswith("-"): test_name = test_name[:-1] return index_name.lower() + def get_environment_var(name: str, defaultVal: Any = None) -> str: val = os.getenv(name, defaultVal) - if (val is None): - raise Exception('Expected environment variable ' + name + ' is not set') + if val is None: + raise Exception("Expected environment variable " + name + " is not set") else: return val -def poll_stats_for_namespace(idx, namespace, expected_count, max_sleep=int(os.environ.get('FRESHNESS_TIMEOUT_SECONDS', 60))): + +def poll_stats_for_namespace( + idx, namespace, expected_count, max_sleep=int(os.environ.get("FRESHNESS_TIMEOUT_SECONDS", 60)) +): delta_t = 5 - total_time=0 + total_time = 0 done = False while not done: print(f'Waiting for namespace "{namespace}" to have vectors. Total time waited: {total_time} seconds') @@ -56,15 +62,16 @@ def poll_stats_for_namespace(idx, namespace, expected_count, max_sleep=int(os.en if namespace in stats.namespaces and stats.namespaces[namespace].vector_count >= expected_count: done = True elif total_time > max_sleep: - raise TimeoutError(f'Timed out waiting for namespace {namespace} to have vectors') + raise TimeoutError(f"Timed out waiting for namespace {namespace} to have vectors") else: total_time += delta_t time.sleep(delta_t) + def poll_fetch_for_ids_in_namespace(idx, ids, namespace): - max_sleep=int(os.environ.get('FRESHNESS_TIMEOUT_SECONDS', 60)) + max_sleep = int(os.environ.get("FRESHNESS_TIMEOUT_SECONDS", 60)) delta_t = 5 - total_time=0 + total_time = 0 done = False while not done: print(f'Attempting to fetch from "{namespace}". 
Total time waited: {total_time} seconds') @@ -76,10 +83,11 @@ def poll_fetch_for_ids_in_namespace(idx, ids, namespace): done = True if total_time > max_sleep: - raise TimeoutError(f'Timed out waiting for namespace {namespace} to have vectors') + raise TimeoutError(f"Timed out waiting for namespace {namespace} to have vectors") else: total_time += delta_t time.sleep(delta_t) + def fake_api_key(): - return '-'.join([random_string(x) for x in [8, 4, 4, 4, 12]]) \ No newline at end of file + return "-".join([random_string(x) for x in [8, 4, 4, 4, 12]]) diff --git a/tests/integration/proxy_config/conftest.py b/tests/integration/proxy_config/conftest.py index 9f775efc..3e73e267 100644 --- a/tests/integration/proxy_config/conftest.py +++ b/tests/integration/proxy_config/conftest.py @@ -5,49 +5,58 @@ from ..helpers import get_environment_var PROXIES = { - 'proxy1': { - 'name': 'proxy1', - 'port': 8080, - 'ssl_ca_certs': os.path.abspath('./tests/integration/proxy_config/.mitm/proxy1'), - 'auth': None + "proxy1": { + "name": "proxy1", + "port": 8080, + "ssl_ca_certs": os.path.abspath("./tests/integration/proxy_config/.mitm/proxy1"), + "auth": None, + }, + "proxy2": { + "name": "proxy2", + "port": 8081, + "ssl_ca_certs": os.path.abspath("./tests/integration/proxy_config/.mitm/proxy2"), + "auth": ("testuser", "testpassword"), }, - 'proxy2': { - 'name': 'proxy2', - 'port': 8081, - 'ssl_ca_certs': os.path.abspath('./tests/integration/proxy_config/.mitm/proxy2'), - 'auth': ('testuser', 'testpassword') - } } + def docker_command(proxy): cmd = [ - "docker", "run", "-d", # detach to run in background - "--rm", # remove container when stopped - "--name", proxy['name'], # name the container - "-p", f"{proxy['port']}:8080", # map the port - "-v", f"{proxy['ssl_ca_certs']}:/home/mitmproxy/.mitmproxy", # mount config as volume - "mitmproxy/mitmproxy", # docker image name - "mitmdump" # command to run + "docker", + "run", + "-d", # detach to run in background + "--rm", # remove container 
when stopped + "--name", + proxy["name"], # name the container + "-p", + f"{proxy['port']}:8080", # map the port + "-v", + f"{proxy['ssl_ca_certs']}:/home/mitmproxy/.mitmproxy", # mount config as volume + "mitmproxy/mitmproxy", # docker image name + "mitmdump", # command to run ] - if proxy['auth']: + if proxy["auth"]: cmd.append(f"--set proxyauth={proxy['auth'][0]}:{proxy['auth'][1]}") print(" ".join(cmd)) return " ".join(cmd) + def run_cmd(cmd, output): output.write("Going to run: " + cmd + "\n") exit_code = subprocess.call(cmd, shell=True, stdout=output, stderr=output) if exit_code != 0: raise Exception(f"Failed to run command: {cmd}") + def use_grpc(): - return os.environ.get('USE_GRPC', 'false') == 'true' + return os.environ.get("USE_GRPC", "false") == "true" + -@pytest.fixture(scope='session', autouse=True) +@pytest.fixture(scope="session", autouse=True) def start_docker(): with open("tests/integration/proxy_config/logs/proxyconfig-docker-start.log", "a") as output: - run_cmd(docker_command(PROXIES['proxy1']), output) - run_cmd(docker_command(PROXIES['proxy2']), output) + run_cmd(docker_command(PROXIES["proxy1"]), output) + run_cmd(docker_command(PROXIES["proxy2"]), output) time.sleep(5) with open("tests/integration/proxy_config/logs/proxyconfig-docker-ps.log", "a") as output: @@ -58,27 +67,34 @@ def start_docker(): run_cmd("docker stop proxy1", output) run_cmd("docker stop proxy2", output) + @pytest.fixture() def proxy1(): - return PROXIES['proxy1'] + return PROXIES["proxy1"] + @pytest.fixture() def proxy2(): - return PROXIES['proxy2'] + return PROXIES["proxy2"] + @pytest.fixture() def client_cls(): if use_grpc(): from pinecone.grpc import PineconeGRPC + return PineconeGRPC else: from pinecone import Pinecone + return Pinecone + @pytest.fixture() def api_key(): - return get_environment_var('PINECONE_API_KEY') + return get_environment_var("PINECONE_API_KEY") + @pytest.fixture() def index_name(): - return get_environment_var('PINECONE_INDEX_NAME') \ No newline 
at end of file + return get_environment_var("PINECONE_INDEX_NAME") diff --git a/tests/integration/proxy_config/test_proxy_settings.py b/tests/integration/proxy_config/test_proxy_settings.py index f71346ea..91863acd 100644 --- a/tests/integration/proxy_config/test_proxy_settings.py +++ b/tests/integration/proxy_config/test_proxy_settings.py @@ -4,10 +4,11 @@ from urllib3 import make_headers from urllib3.exceptions import InsecureRequestWarning -PROXY1_URL_HTTPS = 'https://localhost:8080' -PROXY1_URL_HTTP = 'http://localhost:8080' +PROXY1_URL_HTTPS = "https://localhost:8080" +PROXY1_URL_HTTP = "http://localhost:8080" + +PROXY2_URL = "https://localhost:8081" -PROXY2_URL = 'https://localhost:8081' def exercise_all_apis(client, index_name): # Control plane @@ -16,32 +17,31 @@ def exercise_all_apis(client, index_name): index = client.Index(index_name) index.describe_index_stats() + class TestProxyConfig: - @pytest.mark.skipif(os.getenv('USE_GRPC') != 'false', - reason="gRPC doesn't support 'https://' proxy URLs") + @pytest.mark.skipif(os.getenv("USE_GRPC") != "false", reason="gRPC doesn't support 'https://' proxy URLs") def test_https_proxy_with_self_signed_cert(self, client_cls, api_key, index_name, proxy1): - ssl_ca_certs = os.path.join(proxy1['ssl_ca_certs'], 'mitmproxy-ca-cert.pem') + ssl_ca_certs = os.path.join(proxy1["ssl_ca_certs"], "mitmproxy-ca-cert.pem") pc = client_cls( - api_key=api_key, + api_key=api_key, proxy_url=PROXY1_URL_HTTPS, ssl_ca_certs=ssl_ca_certs, ) exercise_all_apis(pc, index_name) def test_http_proxy_with_self_signed_cert(self, client_cls, api_key, index_name, proxy1): - ssl_ca_certs = os.path.join(proxy1['ssl_ca_certs'], 'mitmproxy-ca-cert.pem') + ssl_ca_certs = os.path.join(proxy1["ssl_ca_certs"], "mitmproxy-ca-cert.pem") pc = client_cls( - api_key=api_key, + api_key=api_key, proxy_url=PROXY1_URL_HTTP, ssl_ca_certs=ssl_ca_certs, ) exercise_all_apis(pc, index_name) - @pytest.mark.skipif(os.getenv('USE_GRPC') != 'false', - reason="gRPC doesn't 
support disabling ssl_verify") + @pytest.mark.skipif(os.getenv("USE_GRPC") != "false", reason="gRPC doesn't support disabling ssl_verify") def test_proxy_with_ssl_verification_disabled_emits_warning(self, client_cls, api_key, index_name): pc = client_cls( - api_key=api_key, + api_key=api_key, proxy_url=PROXY1_URL_HTTPS, ssl_verify=False, ) @@ -54,14 +54,14 @@ def test_proxy_with_incorrect_cert_path(self, client_cls, api_key): pc = client_cls( api_key=api_key, proxy_url=PROXY1_URL_HTTPS, - ssl_ca_certs='~/incorrect/path', + ssl_ca_certs="~/incorrect/path", ) pc.list_indexes() - assert 'No such file or directory' in str(e.value) + assert "No such file or directory" in str(e.value) def test_proxy_with_valid_path_to_incorrect_cert(self, client_cls, api_key, proxy2): - ssl_ca_certs = os.path.join(proxy2['ssl_ca_certs'], 'mitmproxy-ca-cert.pem') + ssl_ca_certs = os.path.join(proxy2["ssl_ca_certs"], "mitmproxy-ca-cert.pem") with pytest.raises(Exception) as e: pc = client_cls( api_key=api_key, @@ -70,19 +70,17 @@ def test_proxy_with_valid_path_to_incorrect_cert(self, client_cls, api_key, prox ) pc.list_indexes() - assert 'CERTIFICATE_VERIFY_FAILED' in str(e.value) + assert "CERTIFICATE_VERIFY_FAILED" in str(e.value) - @pytest.mark.skipif(os.getenv('USE_GRPC') != 'false', - reason="gRPC doesn't support proxy auth") + @pytest.mark.skipif(os.getenv("USE_GRPC") != "false", reason="gRPC doesn't support proxy auth") def test_proxy_that_requires_proxyauth(self, client_cls, api_key, index_name, proxy2): - ssl_ca_certs = os.path.join(proxy2['ssl_ca_certs'], 'mitmproxy-ca-cert.pem') - username = proxy2['auth'][0] - password = proxy2['auth'][1] + ssl_ca_certs = os.path.join(proxy2["ssl_ca_certs"], "mitmproxy-ca-cert.pem") + username = proxy2["auth"][0] + password = proxy2["auth"][1] pc = client_cls( api_key=api_key, proxy_url=PROXY2_URL, - proxy_headers=make_headers(proxy_basic_auth=f'{username}:{password}'), - ssl_ca_certs=ssl_ca_certs + 
proxy_headers=make_headers(proxy_basic_auth=f"{username}:{password}"), + ssl_ca_certs=ssl_ca_certs, ) exercise_all_apis(pc, index_name) - diff --git a/tests/integration/test_upsert.py b/tests/integration/test_upsert.py index 91aa90bd..0acd3d09 100644 --- a/tests/integration/test_upsert.py +++ b/tests/integration/test_upsert.py @@ -1,53 +1,67 @@ import pytest from pinecone import Vector, SparseValues + class TestUpsert: def test_upsert_sanity(self, client, ready_sl_index, random_vector): idx = client.Index(ready_sl_index) # Tuples - idx.upsert(vectors=[('1', random_vector), ('2', random_vector), ('3', random_vector)]) + idx.upsert(vectors=[("1", random_vector), ("2", random_vector), ("3", random_vector)]) # Tuples with metadata - idx.upsert(vectors=[('4', random_vector, {'key': 'value'}), ('5', random_vector, {'key': 'value2'})]) + idx.upsert(vectors=[("4", random_vector, {"key": "value"}), ("5", random_vector, {"key": "value2"})]) # Vector objects - idx.upsert(vectors=[Vector(id='6', values=random_vector)]) - idx.upsert(vectors=[Vector(id='7', values=random_vector, metadata={'key': 'value'})]) + idx.upsert(vectors=[Vector(id="6", values=random_vector)]) + idx.upsert(vectors=[Vector(id="7", values=random_vector, metadata={"key": "value"})]) # Dict - idx.upsert(vectors=[{'id': '8', 'values': random_vector}]) + idx.upsert(vectors=[{"id": "8", "values": random_vector}]) # Dict with metadata - idx.upsert(vectors=[{'id': '8', 'values': random_vector, 'metadata': {'key': 'value'}}]) + idx.upsert(vectors=[{"id": "8", "values": random_vector, "metadata": {"key": "value"}}]) idx.describe_index_stats() def test_upsert_sparse_vectors(self, client, random_vector, create_sl_index_params, index_name): - create_sl_index_params['metric'] = 'dotproduct' - create_sl_index_params['timeout'] = 300 + create_sl_index_params["metric"] = "dotproduct" + create_sl_index_params["timeout"] = 300 client.create_index(**create_sl_index_params) idx = client.Index(index_name) - 
idx.upsert(vectors=[Vector(id='1', values=random_vector, sparse_values=SparseValues(values=[0.1, 0.2, 0.3], indices=[1, 2, 3]))]) - idx.upsert(vectors=[{'id': '8', 'values': random_vector, 'metadata': {'key': 'value'}, 'sparse_values': {'values': [0.1, 0.2, 0.3], 'indices': [1, 2, 3] }}]) + idx.upsert( + vectors=[ + Vector( + id="1", values=random_vector, sparse_values=SparseValues(values=[0.1, 0.2, 0.3], indices=[1, 2, 3]) + ) + ] + ) + idx.upsert( + vectors=[ + { + "id": "8", + "values": random_vector, + "metadata": {"key": "value"}, + "sparse_values": {"values": [0.1, 0.2, 0.3], "indices": [1, 2, 3]}, + } + ] + ) def test_upsert_with_invalid_vector(self, client, ready_sl_index, random_vector): idx = client.Index(ready_sl_index) with pytest.raises(TypeError): # non-vector - idx.upsert(vectors=[('1', 'invalid_vector')]) + idx.upsert(vectors=[("1", "invalid_vector")]) with pytest.raises(TypeError): # bogus metadata - idx.upsert(vectors=[('1', random_vector, 'invalid_metadata')]) + idx.upsert(vectors=[("1", random_vector, "invalid_metadata")]) with pytest.raises(TypeError): # non-string id idx.upsert(vectors=[(1, random_vector)]) with pytest.raises(TypeError): - idx.upsert(vectors=[{'id': 1, 'values': random_vector}]) - - \ No newline at end of file + idx.upsert(vectors=[{"id": 1, "values": random_vector}]) diff --git a/tests/unit/data/test_datetime_parsing.py b/tests/unit/data/test_datetime_parsing.py index cbce1bd0..82f2e6c2 100644 --- a/tests/unit/data/test_datetime_parsing.py +++ b/tests/unit/data/test_datetime_parsing.py @@ -1,13 +1,26 @@ from pinecone import Vector, Config from datetime import datetime + class TestDatetimeConversion: def test_datetimes_not_coerced(self): - vec = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'created_at': '7th of January, 2023'}, _check_type=True, _configuration=Config()) - assert vec.metadata['created_at'] == '7th of January, 2023' - assert vec.metadata['created_at'].__class__ == str + vec = Vector( + id="1", + values=[0.1, 
0.2, 0.3], + metadata={"created_at": "7th of January, 2023"}, + _check_type=True, + _configuration=Config(), + ) + assert vec.metadata["created_at"] == "7th of January, 2023" + assert vec.metadata["created_at"].__class__ == str def test_dates_not_coerced(self): - vec = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'created_at': '8/12/2024'}, _check_type=True, _configuration=Config()) - assert vec.metadata['created_at'] == '8/12/2024' - assert vec.metadata['created_at'].__class__ == str + vec = Vector( + id="1", + values=[0.1, 0.2, 0.3], + metadata={"created_at": "8/12/2024"}, + _check_type=True, + _configuration=Config(), + ) + assert vec.metadata["created_at"] == "8/12/2024" + assert vec.metadata["created_at"].__class__ == str diff --git a/tests/unit/data/test_vector_factory.py b/tests/unit/data/test_vector_factory.py index 0f1a92fa..f8f4e09f 100644 --- a/tests/unit/data/test_vector_factory.py +++ b/tests/unit/data/test_vector_factory.py @@ -5,42 +5,38 @@ from pinecone.data.vector_factory import VectorFactory from pinecone import Vector, SparseValues, ListConversionException + class TestVectorFactory: def test_build_when_returns_vector_unmodified(self): - vec = Vector(id='1', values=[0.1, 0.2, 0.3]) + vec = Vector(id="1", values=[0.1, 0.2, 0.3]) assert VectorFactory.build(vec) == vec assert VectorFactory.build(vec).__class__ == Vector - @pytest.mark.parametrize('values_array', [ - [0.1, 0.2, 0.3], - np.array([0.1, 0.2, 0.3]), - pd.array([0.1, 0.2, 0.3]) - ]) + @pytest.mark.parametrize("values_array", [[0.1, 0.2, 0.3], np.array([0.1, 0.2, 0.3]), pd.array([0.1, 0.2, 0.3])]) def test_build_when_tuple_with_two_values(self, values_array): - tup = ('1', values_array) + tup = ("1", values_array) actual = VectorFactory.build(tup) - expected = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={}) - assert actual == expected - - @pytest.mark.parametrize('values_array', [ - [0.1, 0.2, 0.3], - np.array([0.1, 0.2, 0.3]), - pd.array([0.1, 0.2, 0.3]) - ]) + expected = 
Vector(id="1", values=[0.1, 0.2, 0.3], metadata={}) + assert actual == expected + + @pytest.mark.parametrize("values_array", [[0.1, 0.2, 0.3], np.array([0.1, 0.2, 0.3]), pd.array([0.1, 0.2, 0.3])]) def test_build_when_tuple_with_three_values(self, values_array): - tup = ('1', values_array, {'genre': 'comedy'}) + tup = ("1", values_array, {"genre": "comedy"}) actual = VectorFactory.build(tup) - expected = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'genre': 'comedy'}) + expected = Vector(id="1", values=[0.1, 0.2, 0.3], metadata={"genre": "comedy"}) assert actual == expected - @pytest.mark.parametrize("vector_tup", [ - ("1", 'not an array'), - ("1", {}), - ("1", None), - ("1", 'not an array', {"genre": "comedy"}), - ("1", {}, {"genre": "comedy"}), - ("1", None, {"genre": "comedy"}), - ]) + @pytest.mark.parametrize( + "vector_tup", + [ + ("1", "not an array"), + ("1", {}), + ("1", None), + ("1", "not an array", {"genre": "comedy"}), + ("1", {}, {"genre": "comedy"}), + ("1", None, {"genre": "comedy"}), + ], + ) def test_build_when_tuple_values_must_be_list(self, vector_tup): with pytest.raises( ListConversionException, @@ -48,97 +44,172 @@ def test_build_when_tuple_values_must_be_list(self, vector_tup): ): VectorFactory.build(vector_tup) - def test_build_when_tuple_errors_when_additional_fields(self): with pytest.raises(ValueError, match="Found a tuple of length 4 which is not supported"): - tup = ('1', [0.1, 0.2, 0.3], {'a': 'b'}, 'extra') + tup = ("1", [0.1, 0.2, 0.3], {"a": "b"}, "extra") VectorFactory.build(tup) def test_build_when_tuple_too_short(self): with pytest.raises(ValueError, match="Found a tuple of length 1 which is not supported"): - tup = ('1',) + tup = ("1",) VectorFactory.build(tup) - @pytest.mark.parametrize('values_array', [ - [0.1, 0.2, 0.3], - np.array([0.1, 0.2, 0.3]), - pd.array([0.1, 0.2, 0.3]) - ]) + @pytest.mark.parametrize("values_array", [[0.1, 0.2, 0.3], np.array([0.1, 0.2, 0.3]), pd.array([0.1, 0.2, 0.3])]) def 
test_build_when_dict(self, values_array): - d = { 'id': '1', 'values': values_array, 'metadata': {'genre': 'comedy'}} + d = {"id": "1", "values": values_array, "metadata": {"genre": "comedy"}} actual = VectorFactory.build(d) - expected = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'genre': 'comedy'}) + expected = Vector(id="1", values=[0.1, 0.2, 0.3], metadata={"genre": "comedy"}) assert actual == expected def test_build_when_dict_missing_required_fields(self): with pytest.raises(ValueError, match="Vector dictionary is missing required fields"): - d = {'values': [0.1, 0.2, 0.3]} + d = {"values": [0.1, 0.2, 0.3]} VectorFactory.build(d) def test_build_when_dict_excess_keys(self): with pytest.raises(ValueError, match="Found excess keys in the vector dictionary"): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'extra': 'field'} + d = {"id": "1", "values": [0.1, 0.2, 0.3], "metadata": {"genre": "comedy"}, "extra": "field"} VectorFactory.build(d) def test_build_when_dict_sparse_values(self): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': {'indices': [0, 2], 'values': [0.1, 0.3]}} + d = { + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": {"indices": [0, 2], "values": [0.1, 0.3]}, + } actual = VectorFactory.build(d) - expected = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'genre': 'comedy'}, sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3])) + expected = Vector( + id="1", + values=[0.1, 0.2, 0.3], + metadata={"genre": "comedy"}, + sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3]), + ) assert actual == expected def test_build_when_dict_sparse_values_when_SparseValues(self): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': SparseValues(indices=[0, 2], values=[0.1, 0.3])} + d = { + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": 
SparseValues(indices=[0, 2], values=[0.1, 0.3]), + } actual = VectorFactory.build(d) - expected = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'genre': 'comedy'}, sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3])) + expected = Vector( + id="1", + values=[0.1, 0.2, 0.3], + metadata={"genre": "comedy"}, + sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3]), + ) assert actual == expected def test_build_when_dict_sparse_values_errors_when_not_dict(self): with pytest.raises(ValueError, match="Column `sparse_values` is expected to be a dictionary"): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': 'not a dict'} + d = {"id": "1", "values": [0.1, 0.2, 0.3], "metadata": {"genre": "comedy"}, "sparse_values": "not a dict"} VectorFactory.build(d) def test_build_when_dict_sparse_values_errors_when_missing_indices(self): with pytest.raises(ValueError, match="Missing required keys in data in column `sparse_values`"): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': {'values': [0.1, 0.3]}} + d = { + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": {"values": [0.1, 0.3]}, + } VectorFactory.build(d) def test_build_when_dict_sparse_values_errors_when_missing_values(self): with pytest.raises(ValueError, match="Missing required keys in data in column `sparse_values`"): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': {'indices': [0, 2]}} + d = { + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": {"indices": [0, 2]}, + } VectorFactory.build(d) def test_build_when_dict_sparse_values_errors_when_indices_not_list(self): with pytest.raises(ValueError, match="Found unexpected data in column `sparse_values`"): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': {'indices': 'not a list', 'values': 
[0.1, 0.3]}} + d = { + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": {"indices": "not a list", "values": [0.1, 0.3]}, + } VectorFactory.build(d) def test_build_when_dict_sparse_values_errors_when_values_not_list(self): with pytest.raises(ValueError, match="Found unexpected data in column `sparse_values`"): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': {'indices': [0, 2], 'values': 'not a list'}} + d = { + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": {"indices": [0, 2], "values": "not a list"}, + } VectorFactory.build(d) def test_build_when_dict_sparse_values_when_values_is_ndarray(self): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': {'indices': [0, 2], 'values': np.array([0.1, 0.3])}} + d = { + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": {"indices": [0, 2], "values": np.array([0.1, 0.3])}, + } actual = VectorFactory.build(d) - expected = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'genre': 'comedy'}, sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3])) + expected = Vector( + id="1", + values=[0.1, 0.2, 0.3], + metadata={"genre": "comedy"}, + sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3]), + ) assert actual == expected def test_build_when_dict_sparse_values_when_indices_is_pandas_IntegerArray(self): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': {'indices': pd.array([0, 2]), 'values': [0.1, 0.3]}} + d = { + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": {"indices": pd.array([0, 2]), "values": [0.1, 0.3]}, + } actual = VectorFactory.build(d) - expected = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'genre': 'comedy'}, sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3])) + expected = Vector( 
+ id="1", + values=[0.1, 0.2, 0.3], + metadata={"genre": "comedy"}, + sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3]), + ) assert actual == expected def test_build_when_dict_sparse_values_when_values_is_pandas_FloatingArray(self): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': {'indices': [0, 2], 'values': pd.array([0.1, 0.3])}} + d = { + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": {"indices": [0, 2], "values": pd.array([0.1, 0.3])}, + } actual = VectorFactory.build(d) - expected = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'genre': 'comedy'}, sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3])) + expected = Vector( + id="1", + values=[0.1, 0.2, 0.3], + metadata={"genre": "comedy"}, + sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3]), + ) assert actual == expected def test_build_when_dict_sparse_values_when_indices_is_ndarray(self): - d = {'id': '1', 'values': [0.1, 0.2, 0.3], 'metadata': {'genre': 'comedy'}, 'sparse_values': {'indices': np.array([0, 2]), 'values': [0.1, 0.3]}} + d = { + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": {"indices": np.array([0, 2]), "values": [0.1, 0.3]}, + } actual = VectorFactory.build(d) - expected = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'genre': 'comedy'}, sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3])) + expected = Vector( + id="1", + values=[0.1, 0.2, 0.3], + metadata={"genre": "comedy"}, + sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3]), + ) assert actual == expected def test_build_when_errors_when_other_type(self): @@ -146,12 +217,7 @@ def test_build_when_errors_when_other_type(self): VectorFactory.build(1) def test_build_when_sparse_values_is_None(self): - d = { - 'id': '1', - 'values': [0.1, 0.2, 0.3], - 'metadata': {'genre': 'comedy'}, - 'sparse_values': None - } + d = {"id": "1", "values": [0.1, 0.2, 
0.3], "metadata": {"genre": "comedy"}, "sparse_values": None} actual = VectorFactory.build(d) - expected = Vector(id='1', values=[0.1, 0.2, 0.3], metadata={'genre': 'comedy'}) - assert actual == expected \ No newline at end of file + expected = Vector(id="1", values=[0.1, 0.2, 0.3], metadata={"genre": "comedy"}) + assert actual == expected diff --git a/tests/unit/models/test_collection_list.py b/tests/unit/models/test_collection_list.py index 8c34d368..22e1b028 100644 --- a/tests/unit/models/test_collection_list.py +++ b/tests/unit/models/test_collection_list.py @@ -2,14 +2,30 @@ from pinecone import CollectionList from pinecone.core.client.models import CollectionList as OpenApiCollectionList, CollectionModel + @pytest.fixture def collection_list_response(): return OpenApiCollectionList( - collections=[ - CollectionModel(name='collection1', size=10000, status='Ready', dimension=1536, record_count=1000, environment='us-west1-gcp'), - CollectionModel(name='collection2', size=20000, status='Ready', dimension=256, record_count=2000, environment='us-west1-gcp'), - ], - ) + collections=[ + CollectionModel( + name="collection1", + size=10000, + status="Ready", + dimension=1536, + record_count=1000, + environment="us-west1-gcp", + ), + CollectionModel( + name="collection2", + size=20000, + status="Ready", + dimension=256, + record_count=2000, + environment="us-west1-gcp", + ), + ], + ) + class TestCollectionList: def test_collection_list_has_length(self, collection_list_response): @@ -17,7 +33,7 @@ def test_collection_list_has_length(self, collection_list_response): def test_collection_list_is_(self, collection_list_response): icl = CollectionList(collection_list_response) - assert [i['name'] for i in icl] == ['collection1', 'collection2'] + assert [i["name"] for i in icl] == ["collection1", "collection2"] assert [i.record_count for i in icl] == [1000, 2000] def test_collection_list_getitem(self, collection_list_response): @@ -27,11 +43,13 @@ def 
test_collection_list_getitem(self, collection_list_response): def test_collection_list_proxies_methods(self, collection_list_response): # Forward compatibility, in case we add more attributes to IndexList for pagination - assert CollectionList(collection_list_response).collection_list.collections == collection_list_response.collections + assert ( + CollectionList(collection_list_response).collection_list.collections == collection_list_response.collections + ) def test_when_results_are_empty(self): assert len(CollectionList(OpenApiCollectionList(collections=[]))) == 0 def test_collection_list_names_syntactic_sugar(self, collection_list_response): icl = CollectionList(collection_list_response) - assert icl.names() == ['collection1', 'collection2'] \ No newline at end of file + assert icl.names() == ["collection1", "collection2"] diff --git a/tests/unit/models/test_index_list.py b/tests/unit/models/test_index_list.py index 8033131f..1bcc6d2f 100644 --- a/tests/unit/models/test_index_list.py +++ b/tests/unit/models/test_index_list.py @@ -2,37 +2,29 @@ from pinecone import IndexList from pinecone.core.client.models import IndexList as OpenApiIndexList + @pytest.fixture def index_list_response(): return OpenApiIndexList( - indexes=[ - { - "name": "test-index-1", - "dimension": 2, - "metric": "cosine", - "spec": { - "pod": { - "environment": "us-west1-gcp", - "pod_type": "p1.x1", - "pods": 1, - "replicas": 1, - "shards": 1 - } - } - }, - { - "name": "test-index-2", - "dimension": 3, - "metric": "cosine", - "spec": { - "serverless": { - "cloud": "aws", - "region": "us-west-2" - } - } + indexes=[ + { + "name": "test-index-1", + "dimension": 2, + "metric": "cosine", + "spec": { + "pod": {"environment": "us-west1-gcp", "pod_type": "p1.x1", "pods": 1, "replicas": 1, "shards": 1} }, - ], _check_type=False - ) + }, + { + "name": "test-index-2", + "dimension": 3, + "metric": "cosine", + "spec": {"serverless": {"cloud": "aws", "region": "us-west-2"}}, + }, + ], + 
_check_type=False, + ) + class TestIndexList: def test_index_list_has_length(self, index_list_response): @@ -40,13 +32,13 @@ def test_index_list_has_length(self, index_list_response): def test_index_list_is(self, index_list_response): iil = IndexList(index_list_response) - assert [i['name'] for i in iil] == ['test-index-1', 'test-index-2'] - assert [i['dimension'] for i in iil] == [2, 3] - assert [i['metric'] for i in iil] == ['cosine', 'cosine'] + assert [i["name"] for i in iil] == ["test-index-1", "test-index-2"] + assert [i["dimension"] for i in iil] == [2, 3] + assert [i["metric"] for i in iil] == ["cosine", "cosine"] def test_index_list_names_syntactic_sugar(self, index_list_response): iil = IndexList(index_list_response) - assert iil.names() == ['test-index-1', 'test-index-2'] + assert iil.names() == ["test-index-1", "test-index-2"] def test_index_list_getitem(self, index_list_response): iil = IndexList(index_list_response) @@ -63,4 +55,3 @@ def test_when_results_are_empty(self): assert iil.index_list.indexes == [] assert iil.indexes == [] assert iil.names() == [] - diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index cf3996e1..6eaf4f48 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -8,6 +8,7 @@ from urllib3 import make_headers + class TestConfig: @pytest.fixture(autouse=True) def run_before_and_after_tests(tmpdir): @@ -16,7 +17,12 @@ def run_before_and_after_tests(tmpdir): # Defend against unexpected env vars. Since we clear these variables below # after each test execution, these should only be raised if there is # test pollution in the environment coming from some other test file/setup. 
- known_env_vars = ["PINECONE_API_KEY", "PINECONE_ENVIRONMENT", "PINECONE_CONTROLLER_HOST", "PINECONE_ADDITIONAL_HEADERS"] + known_env_vars = [ + "PINECONE_API_KEY", + "PINECONE_ENVIRONMENT", + "PINECONE_CONTROLLER_HOST", + "PINECONE_ADDITIONAL_HEADERS", + ] for var in known_env_vars: if os.getenv(var): raise ValueError(f"Unexpected env var {var} found in environment. Check for test pollution.") @@ -51,15 +57,17 @@ def test_init_with_positional_args(self): def test_init_with_kwargs(self): api_key = "my-api-key" controller_host = "my-controller-host" - ssl_ca_cert = 'path/to/cert-bundle.pem' + ssl_ca_cert = "path/to/cert-bundle.pem" openapi_config = OpenApiConfiguration() - config = PineconeConfig.build(api_key=api_key, host=controller_host, ssl_ca_certs=ssl_ca_cert, openapi_config=openapi_config) + config = PineconeConfig.build( + api_key=api_key, host=controller_host, ssl_ca_certs=ssl_ca_cert, openapi_config=openapi_config + ) assert config.api_key == api_key - assert config.host == 'https://' + controller_host - assert config.ssl_ca_certs == 'path/to/cert-bundle.pem' + assert config.host == "https://" + controller_host + assert config.ssl_ca_certs == "path/to/cert-bundle.pem" def test_resolution_order_kwargs_over_env_vars(self): """ @@ -77,71 +85,70 @@ def test_resolution_order_kwargs_over_env_vars(self): config = PineconeConfig.build(api_key=api_key, host=controller_host, additional_headers=additional_headers) assert config.api_key == api_key - assert config.host == 'https://' + controller_host + assert config.host == "https://" + controller_host assert config.additional_headers == additional_headers def test_errors_when_no_api_key_is_present(self): with pytest.raises(PineconeConfigurationError): PineconeConfig.build() - + def test_config_pool_threads(self): pc = Pinecone(api_key="test-api-key", host="test-controller-host", pool_threads=10) assert pc.index_api.api_client.pool_threads == 10 - idx = pc.Index(host='my-index-host', name='my-index-name') + idx = 
pc.Index(host="my-index-host", name="my-index-name") assert idx._vector_api.api_client.pool_threads == 10 - + def test_config_when_openapi_config_is_passed_merges_api_key(self): oai_config = OpenApiConfiguration() - pc = Pinecone(api_key='asdf', openapi_config=oai_config) - assert pc.openapi_config.api_key == {'ApiKeyAuth': 'asdf'} + pc = Pinecone(api_key="asdf", openapi_config=oai_config) + assert pc.openapi_config.api_key == {"ApiKeyAuth": "asdf"} def test_ssl_config_passed_to_index_client(self): oai_config = OpenApiConfiguration() - oai_config.ssl_ca_cert = 'path/to/cert' - proxy_headers = make_headers(proxy_basic_auth='asdf') + oai_config.ssl_ca_cert = "path/to/cert" + proxy_headers = make_headers(proxy_basic_auth="asdf") oai_config.proxy_headers = proxy_headers - - pc = Pinecone(api_key='key', openapi_config=oai_config) - assert pc.openapi_config.ssl_ca_cert == 'path/to/cert' + pc = Pinecone(api_key="key", openapi_config=oai_config) + + assert pc.openapi_config.ssl_ca_cert == "path/to/cert" assert pc.openapi_config.proxy_headers == proxy_headers - idx = pc.Index(host='host') - assert idx._vector_api.api_client.configuration.ssl_ca_cert == 'path/to/cert' + idx = pc.Index(host="host") + assert idx._vector_api.api_client.configuration.ssl_ca_cert == "path/to/cert" assert idx._vector_api.api_client.configuration.proxy_headers == proxy_headers def test_host_config_not_clobbered_by_index(self): oai_config = OpenApiConfiguration() - oai_config.ssl_ca_cert = 'path/to/cert' - proxy_headers = make_headers(proxy_basic_auth='asdf') + oai_config.ssl_ca_cert = "path/to/cert" + proxy_headers = make_headers(proxy_basic_auth="asdf") oai_config.proxy_headers = proxy_headers - - pc = Pinecone(api_key='key', openapi_config=oai_config) - assert pc.openapi_config.ssl_ca_cert == 'path/to/cert' + pc = Pinecone(api_key="key", openapi_config=oai_config) + + assert pc.openapi_config.ssl_ca_cert == "path/to/cert" assert pc.openapi_config.proxy_headers == proxy_headers - assert 
pc.openapi_config.host == 'https://api.pinecone.io' + assert pc.openapi_config.host == "https://api.pinecone.io" - idx = pc.Index(host='host') - assert idx._vector_api.api_client.configuration.ssl_ca_cert == 'path/to/cert' + idx = pc.Index(host="host") + assert idx._vector_api.api_client.configuration.ssl_ca_cert == "path/to/cert" assert idx._vector_api.api_client.configuration.proxy_headers == proxy_headers - assert idx._vector_api.api_client.configuration.host == 'https://host' + assert idx._vector_api.api_client.configuration.host == "https://host" - assert pc.openapi_config.host == 'https://api.pinecone.io' + assert pc.openapi_config.host == "https://api.pinecone.io" def test_proxy_config(self): pc = Pinecone( - api_key='asdf', - proxy_url='http://localhost:8080', - ssl_ca_certs='path/to/cert-bundle.pem', + api_key="asdf", + proxy_url="http://localhost:8080", + ssl_ca_certs="path/to/cert-bundle.pem", ) - assert pc.config.proxy_url == 'http://localhost:8080' - assert pc.config.ssl_ca_certs == 'path/to/cert-bundle.pem' + assert pc.config.proxy_url == "http://localhost:8080" + assert pc.config.ssl_ca_certs == "path/to/cert-bundle.pem" - assert pc.openapi_config.proxy == 'http://localhost:8080' - assert pc.openapi_config.ssl_ca_cert == 'path/to/cert-bundle.pem' + assert pc.openapi_config.proxy == "http://localhost:8080" + assert pc.openapi_config.ssl_ca_cert == "path/to/cert-bundle.pem" - assert pc.index_api.api_client.configuration.proxy == 'http://localhost:8080' - assert pc.index_api.api_client.configuration.ssl_ca_cert == 'path/to/cert-bundle.pem' - \ No newline at end of file + assert pc.index_api.api_client.configuration.proxy == "http://localhost:8080" + assert pc.index_api.api_client.configuration.ssl_ca_cert == "path/to/cert-bundle.pem" diff --git a/tests/unit/test_config_builder.py b/tests/unit/test_config_builder.py index d651595f..84d3c7dd 100644 --- a/tests/unit/test_config_builder.py +++ b/tests/unit/test_config_builder.py @@ -4,6 +4,7 @@ from 
pinecone.config import ConfigBuilder from pinecone import PineconeConfigurationError + class TestConfigBuilder: def test_build_simple(self): config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host") @@ -13,9 +14,7 @@ def test_build_simple(self): def test_build_merges_key_and_host_when_openapi_config_provided(self): config = ConfigBuilder.build( - api_key="my-api-key", - host="https://my-controller-host", - openapi_config=OpenApiConfiguration() + api_key="my-api-key", host="https://my-controller-host", openapi_config=OpenApiConfiguration() ) assert config.api_key == "my-api-key" assert config.host == "https://my-controller-host" @@ -23,8 +22,8 @@ def test_build_merges_key_and_host_when_openapi_config_provided(self): def test_build_with_source_tag(self): config = ConfigBuilder.build( - api_key="my-api-key", - host="https://my-controller-host", + api_key="my-api-key", + host="https://my-controller-host", source_tag="my-source-tag", ) assert config.api_key == "my-api-key" @@ -39,7 +38,7 @@ def test_build_errors_when_no_api_key_is_present(self): def test_build_errors_when_no_host_is_present(self): with pytest.raises(PineconeConfigurationError) as e: - ConfigBuilder.build(api_key='my-api-key') + ConfigBuilder.build(api_key="my-api-key") assert str(e.value) == "You haven't specified a host." 
def test_build_openapi_config(self): @@ -49,35 +48,28 @@ def test_build_openapi_config(self): assert openapi_config.api_key == {"ApiKeyAuth": "my-api-key"} def test_build_openapi_config_merges_with_existing_config(self): - config = ConfigBuilder.build( - api_key="my-api-key", - host="https://my-controller-host" - ) + config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host") openapi_config = OpenApiConfiguration() openapi_config.ssl_ca_cert = "path/to/bundle" - openapi_config.proxy = 'http://my-proxy:8080' + openapi_config.proxy = "http://my-proxy:8080" openapi_config = ConfigBuilder.build_openapi_config(config, openapi_config) assert openapi_config.api_key == {"ApiKeyAuth": "my-api-key"} assert openapi_config.host == "https://my-controller-host" assert openapi_config.ssl_ca_cert == "path/to/bundle" - assert openapi_config.proxy == 'http://my-proxy:8080' + assert openapi_config.proxy == "http://my-proxy:8080" def test_build_openapi_config_does_not_mutate_input(self): - config = ConfigBuilder.build( - api_key="my-api-key", - host="foo", - ssl_ca_certs="path/to/bundle.foo" - ) + config = ConfigBuilder.build(api_key="my-api-key", host="foo", ssl_ca_certs="path/to/bundle.foo") input_openapi_config = OpenApiConfiguration() - input_openapi_config.host = 'bar' + input_openapi_config.host = "bar" input_openapi_config.ssl_ca_cert = "asdfasdf" openapi_config = ConfigBuilder.build_openapi_config(config, input_openapi_config) assert openapi_config.host == "https://foo" assert openapi_config.ssl_ca_cert == "path/to/bundle.foo" - assert input_openapi_config.host == 'bar' - assert input_openapi_config.ssl_ca_cert == "asdfasdf" \ No newline at end of file + assert input_openapi_config.host == "bar" + assert input_openapi_config.ssl_ca_cert == "asdfasdf" diff --git a/tests/unit/test_control.py b/tests/unit/test_control.py index d7186cdf..228e20d6 100644 --- a/tests/unit/test_control.py +++ b/tests/unit/test_control.py @@ -8,25 +8,52 @@ import time + 
@pytest.fixture def index_list_response(): - return IndexList(indexes=[ - IndexModel(name="index1", dimension=10, metric="euclidean", host="asdf", status={"ready": True}, spec={}, _check_type=False), - IndexModel(name="index2", dimension=10, metric="euclidean", host="asdf", status={"ready": True}, spec={}, _check_type=False), - IndexModel(name="index3", dimension=10, metric="euclidean", host="asdf", status={"ready": True}, spec={}, _check_type=False), + return IndexList( + indexes=[ + IndexModel( + name="index1", + dimension=10, + metric="euclidean", + host="asdf", + status={"ready": True}, + spec={}, + _check_type=False, + ), + IndexModel( + name="index2", + dimension=10, + metric="euclidean", + host="asdf", + status={"ready": True}, + spec={}, + _check_type=False, + ), + IndexModel( + name="index3", + dimension=10, + metric="euclidean", + host="asdf", + status={"ready": True}, + spec={}, + _check_type=False, + ), + ] + ) - ]) class TestControl: def test_plugins_are_installed(self): - with patch('pinecone.control.pinecone.install_plugins') as mock_install_plugins: - p = Pinecone(api_key='asdf') + with patch("pinecone.control.pinecone.install_plugins") as mock_install_plugins: + p = Pinecone(api_key="asdf") mock_install_plugins.assert_called_once() - + def test_bad_plugin_doesnt_break_sdk(self): - with patch('pinecone.control.pinecone.install_plugins', side_effect=Exception("bad plugin")): + with patch("pinecone.control.pinecone.install_plugins", side_effect=Exception("bad plugin")): try: - p = Pinecone(api_key='asdf') + p = Pinecone(api_key="asdf") except Exception as e: assert False, f"Unexpected exception: {e}" @@ -44,7 +71,7 @@ def test_passing_additional_headers(self): for key, value in extras.items(): assert p.index_api.api_client.default_headers[key] == value - assert 'User-Agent' in p.index_api.api_client.default_headers + assert "User-Agent" in p.index_api.api_client.default_headers assert len(p.index_api.api_client.default_headers) == 3 def 
test_overwrite_useragent(self): @@ -52,7 +79,7 @@ def test_overwrite_useragent(self): # when embedding the client in other pinecone tools such as canopy. extras = {"User-Agent": "test-user-agent"} p = Pinecone(api_key="123-456-789", additional_headers=extras) - assert p.index_api.api_client.default_headers['User-Agent'] == 'test-user-agent' + assert p.index_api.api_client.default_headers["User-Agent"] == "test-user-agent" assert len(p.index_api.api_client.default_headers) == 1 def test_set_source_tag_in_useragent(self): @@ -60,53 +87,61 @@ def test_set_source_tag_in_useragent(self): assert re.search(r"source_tag=test_source_tag", p.index_api.api_client.user_agent) is not None def test_set_source_tag_in_useragent_via_config(self): - config = ConfigBuilder.build(api_key='YOUR_API_KEY', host='https://my-host', source_tag='my_source_tag') + config = ConfigBuilder.build(api_key="YOUR_API_KEY", host="https://my-host", source_tag="my_source_tag") p = Pinecone(config=config) assert re.search(r"source_tag=my_source_tag", p.index_api.api_client.user_agent) is not None - @pytest.mark.parametrize("timeout_value, describe_index_responses, expected_describe_index_calls, expected_sleep_calls", [ - # When timeout=None, describe_index is called until ready - (None, [{ "status": {"ready": False}}, {"status": {"ready": True}}], 2, 1), - - # Timeout of 10 seconds, describe_index called 3 times, sleep twice - (10, [{"status": {"ready": False}}, {"status": {"ready": False}}, {"status": {"ready": True}}], 3, 2), - - # When timeout=-1, create_index returns immediately without calling describe_index or sleep - (-1, [{"status": {"ready": False}}], 0, 0), - ]) - def test_create_index_with_timeout(self, mocker, timeout_value, describe_index_responses, expected_describe_index_calls, expected_sleep_calls): + @pytest.mark.parametrize( + "timeout_value, describe_index_responses, expected_describe_index_calls, expected_sleep_calls", + [ + # When timeout=None, describe_index is called until ready + 
(None, [{"status": {"ready": False}}, {"status": {"ready": True}}], 2, 1), + # Timeout of 10 seconds, describe_index called 3 times, sleep twice + (10, [{"status": {"ready": False}}, {"status": {"ready": False}}, {"status": {"ready": True}}], 3, 2), + # When timeout=-1, create_index returns immediately without calling describe_index or sleep + (-1, [{"status": {"ready": False}}], 0, 0), + ], + ) + def test_create_index_with_timeout( + self, mocker, timeout_value, describe_index_responses, expected_describe_index_calls, expected_sleep_calls + ): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, 'describe_index', side_effect=describe_index_responses) - mocker.patch.object(p.index_api, 'create_index') - mocker.patch('time.sleep') + mocker.patch.object(p.index_api, "describe_index", side_effect=describe_index_responses) + mocker.patch.object(p.index_api, "create_index") + mocker.patch("time.sleep") - p.create_index(name="my-index", dimension=10, spec=ServerlessSpec(cloud="aws", region="us-west1"), timeout=timeout_value) + p.create_index( + name="my-index", dimension=10, spec=ServerlessSpec(cloud="aws", region="us-west1"), timeout=timeout_value + ) assert p.index_api.create_index.call_count == 1 assert p.index_api.describe_index.call_count == expected_describe_index_calls assert time.sleep.call_count == expected_sleep_calls - @pytest.mark.parametrize("timeout_value, describe_index_responses, expected_describe_index_calls, expected_sleep_calls", [ - # When timeout=None, describe_index is called until ready - (None, [{ "status": {"ready": False}}, {"status": {"ready": True}}], 2, 1), - - # Timeout of 10 seconds, describe_index called 3 times, sleep twice - (10, [{"status": {"ready": False}}, {"status": {"ready": False}}, {"status": {"ready": True}}], 3, 2), - - # When timeout=-1, create_index returns immediately without calling describe_index or sleep - (-1, [{"status": {"ready": False}}], 0, 0), - ]) - def 
test_create_index_from_source_collection(self, mocker, timeout_value, describe_index_responses, expected_describe_index_calls, expected_sleep_calls): + @pytest.mark.parametrize( + "timeout_value, describe_index_responses, expected_describe_index_calls, expected_sleep_calls", + [ + # When timeout=None, describe_index is called until ready + (None, [{"status": {"ready": False}}, {"status": {"ready": True}}], 2, 1), + # Timeout of 10 seconds, describe_index called 3 times, sleep twice + (10, [{"status": {"ready": False}}, {"status": {"ready": False}}, {"status": {"ready": True}}], 3, 2), + # When timeout=-1, create_index returns immediately without calling describe_index or sleep + (-1, [{"status": {"ready": False}}], 0, 0), + ], + ) + def test_create_index_from_source_collection( + self, mocker, timeout_value, describe_index_responses, expected_describe_index_calls, expected_sleep_calls + ): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, 'describe_index', side_effect=describe_index_responses) - mocker.patch.object(p.index_api, 'create_index') - mocker.patch('time.sleep') + mocker.patch.object(p.index_api, "describe_index", side_effect=describe_index_responses) + mocker.patch.object(p.index_api, "create_index") + mocker.patch("time.sleep") p.create_index( - name="my-index", - dimension=10, - spec=PodSpec(environment='us-east1-gcp', source_collection="my-collection"), - timeout=timeout_value + name="my-index", + dimension=10, + spec=PodSpec(environment="us-east1-gcp", source_collection="my-collection"), + timeout=timeout_value, ) assert p.index_api.create_index.call_count == 1 @@ -116,18 +151,18 @@ def test_create_index_from_source_collection(self, mocker, timeout_value, descri def test_create_index_when_timeout_exceeded(self, mocker): with pytest.raises(TimeoutError): p = Pinecone(api_key="123-456-789") - mocker.patch.object(p.index_api, 'create_index') + mocker.patch.object(p.index_api, "create_index") describe_index_response = [{"status": 
{"ready": False}}] * 5 - mocker.patch.object(p.index_api, 'describe_index', side_effect=describe_index_response) - mocker.patch('time.sleep') + mocker.patch.object(p.index_api, "describe_index", side_effect=describe_index_response) + mocker.patch("time.sleep") p.create_index(name="my-index", dimension=10, timeout=10, spec=PodSpec(environment="us-west1-gcp")) def test_list_indexes_returns_iterable(self, mocker, index_list_response): p = Pinecone(api_key="123-456-789") - - mocker.patch.object(p.index_api, 'list_indexes', side_effect=[index_list_response]) + + mocker.patch.object(p.index_api, "list_indexes", side_effect=[index_list_response]) response = p.list_indexes() assert [i.name for i in response] == ["index1", "index2", "index3"] @@ -136,25 +171,24 @@ def test_api_key_and_openapi_config(self, mocker): p = Pinecone(api_key="123", openapi_config=OpenApiConfiguration.get_default_copy()) assert p.config.api_key == "123" + class TestIndexConfig: def test_default_pool_threads(self): pc = Pinecone(api_key="123-456-789") - index = pc.Index(host='my-host.svg.pinecone.io') + index = pc.Index(host="my-host.svg.pinecone.io") assert index._vector_api.api_client.pool_threads == 1 def test_pool_threads_when_indexapi_passed(self): pc = Pinecone(api_key="123-456-789", pool_threads=2, index_api=ManageIndexesApi()) - index = pc.Index(host='my-host.svg.pinecone.io') + index = pc.Index(host="my-host.svg.pinecone.io") assert index._vector_api.api_client.pool_threads == 2 def test_target_index_with_pool_threads_inherited(self): - pc = Pinecone(api_key="123-456-789", pool_threads=10, foo='bar') - index = pc.Index(host='my-host.svg.pinecone.io') + pc = Pinecone(api_key="123-456-789", pool_threads=10, foo="bar") + index = pc.Index(host="my-host.svg.pinecone.io") assert index._vector_api.api_client.pool_threads == 10 def test_target_index_with_pool_threads_kwarg(self): pc = Pinecone(api_key="123-456-789", pool_threads=10) - index = pc.Index(host='my-host.svg.pinecone.io', pool_threads=5) 
+ index = pc.Index(host="my-host.svg.pinecone.io", pool_threads=5) assert index._vector_api.api_client.pool_threads == 5 - - diff --git a/tests/unit/test_index.py b/tests/unit/test_index.py index 5b1086e5..4bae22ff 100644 --- a/tests/unit/test_index.py +++ b/tests/unit/test_index.py @@ -7,6 +7,7 @@ from pinecone import UpsertRequest, Vector from pinecone import DescribeIndexStatsRequest, ScoredVector, QueryResponse, UpsertResponse, SparseValues + class TestRestIndex: def setup_method(self): self.vector_dim = 8 @@ -28,10 +29,10 @@ def setup_method(self): self.svv2 = [0.1, 0.2, 0.3] self.sv2 = {"indices": self.svi2, "values": self.svv2} - self.index = Index(api_key='asdf', host='https://test.pinecone.io') - + self.index = Index(api_key="asdf", host="https://test.pinecone.io") + # region: upsert tests - + def test_upsert_tuplesOfIdVec_UpserWithoutMD(self, mocker): mocker.patch.object(self.index._vector_api, "upsert", autospec=True) self.index.upsert([("vec1", self.vals1), ("vec2", self.vals2)], namespace="ns") @@ -377,7 +378,10 @@ def test_query_rejects_both_id_and_vector(self): def test_query_with_positional_args(self, mocker): with pytest.raises(ValueError) as e: self.index.query([0.1, 0.2, 0.3], top_k=10) - assert "The argument order for `query()` has changed; please use keyword arguments instead of positional arguments" in str(e.value) + assert ( + "The argument order for `query()` has changed; please use keyword arguments instead of positional arguments" + in str(e.value) + ) # endregion diff --git a/tests/unit/test_index_initialization.py b/tests/unit/test_index_initialization.py index 4fc12500..6af8dbcf 100644 --- a/tests/unit/test_index_initialization.py +++ b/tests/unit/test_index_initialization.py @@ -2,64 +2,47 @@ import re from pinecone import ConfigBuilder, Pinecone -class TestIndexClientInitialization(): - @pytest.mark.parametrize( - 'additional_headers', - [ - None, - {} - ] - ) + +class TestIndexClientInitialization: + 
@pytest.mark.parametrize("additional_headers", [None, {}]) def test_no_additional_headers_leaves_useragent_only(self, additional_headers): - pc = Pinecone(api_key='YOUR_API_KEY') - index = pc.Index(host='myhost', additional_headers=additional_headers) + pc = Pinecone(api_key="YOUR_API_KEY") + index = pc.Index(host="myhost", additional_headers=additional_headers) assert len(index._vector_api.api_client.default_headers) == 1 - assert 'User-Agent' in index._vector_api.api_client.default_headers - assert 'python-client-' in index._vector_api.api_client.default_headers['User-Agent'] + assert "User-Agent" in index._vector_api.api_client.default_headers + assert "python-client-" in index._vector_api.api_client.default_headers["User-Agent"] def test_additional_headers_one_additional(self): - pc = Pinecone(api_key='YOUR_API_KEY') - index = pc.Index( - host='myhost', - additional_headers={'test-header': 'test-header-value'} - ) - assert 'test-header' in index._vector_api.api_client.default_headers + pc = Pinecone(api_key="YOUR_API_KEY") + index = pc.Index(host="myhost", additional_headers={"test-header": "test-header-value"}) + assert "test-header" in index._vector_api.api_client.default_headers assert len(index._vector_api.api_client.default_headers) == 2 def test_multiple_additional_headers(self): - pc = Pinecone(api_key='YOUR_API_KEY') + pc = Pinecone(api_key="YOUR_API_KEY") index = pc.Index( - host='myhost', - additional_headers={ - 'test-header': 'test-header-value', - 'test-header2': 'test-header-value2' - } + host="myhost", additional_headers={"test-header": "test-header-value", "test-header2": "test-header-value2"} ) - assert 'test-header' in index._vector_api.api_client.default_headers - assert 'test-header2' in index._vector_api.api_client.default_headers + assert "test-header" in index._vector_api.api_client.default_headers + assert "test-header2" in index._vector_api.api_client.default_headers assert len(index._vector_api.api_client.default_headers) == 3 def 
test_overwrite_useragent(self): # This doesn't seem like a common use case, but we may want to allow this # when embedding the client in other pinecone tools such as canopy. - pc = Pinecone(api_key='YOUR_API_KEY') - index = pc.Index( - host='myhost', - additional_headers={ - 'User-Agent': 'test-user-agent' - } - ) + pc = Pinecone(api_key="YOUR_API_KEY") + index = pc.Index(host="myhost", additional_headers={"User-Agent": "test-user-agent"}) assert len(index._vector_api.api_client.default_headers) == 1 - assert 'User-Agent' in index._vector_api.api_client.default_headers - assert index._vector_api.api_client.default_headers['User-Agent'] == 'test-user-agent' + assert "User-Agent" in index._vector_api.api_client.default_headers + assert index._vector_api.api_client.default_headers["User-Agent"] == "test-user-agent" def test_set_source_tag(self): pc = Pinecone(api_key="123-456-789", source_tag="test_source_tag") - index = pc.Index(host='myhost') + index = pc.Index(host="myhost") assert re.search(r"source_tag=test_source_tag", pc.index_api.api_client.user_agent) is not None def test_set_source_tag_via_config(self): - config = ConfigBuilder.build(api_key='YOUR_API_KEY', host='https://my-host', source_tag='my_source_tag') + config = ConfigBuilder.build(api_key="YOUR_API_KEY", host="https://my-host", source_tag="my_source_tag") pc = Pinecone(config=config) - index = pc.Index(host='myhost') + index = pc.Index(host="myhost") assert re.search(r"source_tag=my_source_tag", pc.index_api.api_client.user_agent) is not None diff --git a/tests/unit/test_langchain_helpful_errors.py b/tests/unit/test_langchain_helpful_errors.py index 006572c3..ad87d755 100644 --- a/tests/unit/test_langchain_helpful_errors.py +++ b/tests/unit/test_langchain_helpful_errors.py @@ -1,7 +1,8 @@ import pytest from pinecone import Pinecone -class TestLangchainErrorMessages(): + +class TestLangchainErrorMessages: def test_error_from_texts_positional_args(self): with pytest.raises(AttributeError) as e: 
Pinecone.from_texts("texts", "id") @@ -16,4 +17,3 @@ def test_error_from_documents(self): with pytest.raises(AttributeError) as e: Pinecone.from_documents("documents", "id") assert "from_documents is not a top-level attribute of the Pinecone class" in str(e.value) - diff --git a/tests/unit/test_version.py b/tests/unit/test_version.py index 99f7cafc..c9444aaa 100644 --- a/tests/unit/test_version.py +++ b/tests/unit/test_version.py @@ -1,5 +1,6 @@ import re import pinecone + def test_version(): - assert re.search(r"\d+\.\d+\.\d+", pinecone.__version__) is not None \ No newline at end of file + assert re.search(r"\d+\.\d+\.\d+", pinecone.__version__) is not None diff --git a/tests/unit/utils/test_convert_to_list.py b/tests/unit/utils/test_convert_to_list.py index 8107c125..6699493f 100644 --- a/tests/unit/utils/test_convert_to_list.py +++ b/tests/unit/utils/test_convert_to_list.py @@ -4,6 +4,7 @@ import numpy as np import pandas as pd + def test_convert_to_list_when_numpy_array(): obj = np.array([1, 2, 3]) actual = convert_to_list(obj) @@ -11,6 +12,7 @@ def test_convert_to_list_when_numpy_array(): assert actual == expected assert actual[0].__class__ == expected[0].__class__ + def test_convert_to_list_when_pandas_array(): obj = pd.array([1, 2, 3]) actual = convert_to_list(obj) @@ -18,6 +20,7 @@ def test_convert_to_list_when_pandas_array(): assert actual == expected assert actual[0].__class__ == expected[0].__class__ + def test_convert_to_list_when_pandas_float_array(): obj = pd.array([0.1, 0.2, 0.3]) actual = convert_to_list(obj) @@ -25,6 +28,7 @@ def test_convert_to_list_when_pandas_float_array(): assert actual == expected assert actual[0].__class__ == expected[0].__class__ + def test_convert_to_list_when_pandas_series(): obj = pd.Series([1, 2, 3]) actual = convert_to_list(obj) @@ -32,28 +36,21 @@ def test_convert_to_list_when_pandas_series(): assert actual == expected assert actual[0].__class__ == expected[0].__class__ + def test_convert_to_list_when_already_list(): 
obj = [1, 2, 3] actual = convert_to_list(obj) expected = [1, 2, 3] assert actual == expected -@pytest.mark.parametrize("input", [ - "", - "not a list", - {} -]) + +@pytest.mark.parametrize("input", ["", "not a list", {}]) def test_invalid_iterable_inputs(input): with pytest.raises(TypeError, match="Expected a list or list-like data structure"): convert_to_list(input) -@pytest.mark.parametrize("invalid_input", [ - None, - 1, - 0, - 1.0, - True -]) + +@pytest.mark.parametrize("invalid_input", [None, 1, 0, 1.0, True]) def test_invalid_non_iterable_input(invalid_input): with pytest.raises(TypeError, match="Expected a list or list-like data structure"): - convert_to_list(invalid_input) \ No newline at end of file + convert_to_list(invalid_input) diff --git a/tests/unit/utils/test_docs_links.py b/tests/unit/utils/test_docs_links.py index deb9f512..478ba3b2 100644 --- a/tests/unit/utils/test_docs_links.py +++ b/tests/unit/utils/test_docs_links.py @@ -4,7 +4,8 @@ urls = list(docslinks.values()) + @pytest.mark.parametrize("url", urls) def test_valid_links(url): - response = requests.get(url) - assert response.status_code == 200, f"Docs link is invalid: {url}" \ No newline at end of file + response = requests.get(url) + assert response.status_code == 200, f"Docs link is invalid: {url}" diff --git a/tests/unit/utils/test_normalize_host.py b/tests/unit/utils/test_normalize_host.py index b5a54876..d6ea9b31 100644 --- a/tests/unit/utils/test_normalize_host.py +++ b/tests/unit/utils/test_normalize_host.py @@ -1,18 +1,26 @@ from pinecone.utils import normalize_host + def test_when_url_is_none(): assert normalize_host(None) is None + def test_when_url_is_https(): - assert normalize_host('https://index-name-abcdef.svc.pinecone.io') == 'https://index-name-abcdef.svc.pinecone.io' + assert normalize_host("https://index-name-abcdef.svc.pinecone.io") == "https://index-name-abcdef.svc.pinecone.io" + def test_when_url_is_http(): - # This should not occur in prod, but if it does, we will 
leave it alone. + # This should not occur in prod, but if it does, we will leave it alone. # Could be useful when testing with local proxies. - assert normalize_host('http://index-name-abcdef.svc.pinecone.io') == 'http://index-name-abcdef.svc.pinecone.io' + assert normalize_host("http://index-name-abcdef.svc.pinecone.io") == "http://index-name-abcdef.svc.pinecone.io" + def test_when_url_is_host_without_protocol(): - assert normalize_host('index-name-abcdef.svc.pinecone.io') == 'https://index-name-abcdef.svc.pinecone.io' + assert normalize_host("index-name-abcdef.svc.pinecone.io") == "https://index-name-abcdef.svc.pinecone.io" + def test_can_be_called_multiple_times(): - assert normalize_host(normalize_host('index-name-abcdef.svc.pinecone.io')) == 'https://index-name-abcdef.svc.pinecone.io' \ No newline at end of file + assert ( + normalize_host(normalize_host("index-name-abcdef.svc.pinecone.io")) + == "https://index-name-abcdef.svc.pinecone.io" + ) diff --git a/tests/unit/utils/test_setup_openapi_client.py b/tests/unit/utils/test_setup_openapi_client.py index 9c4f355a..e4e77df4 100644 --- a/tests/unit/utils/test_setup_openapi_client.py +++ b/tests/unit/utils/test_setup_openapi_client.py @@ -5,19 +5,19 @@ from pinecone.core.client.api_client import ApiClient from pinecone.utils.setup_openapi_client import setup_openapi_client, build_plugin_setup_client -class TestSetupOpenAPIClient(): + +class TestSetupOpenAPIClient: def test_setup_openapi_client(self): - config = ConfigBuilder.build( - api_key="my-api-key", - host="https://my-controller-host" - ) + config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host") openapi_config = ConfigBuilder.build_openapi_config(config) assert openapi_config.host == "https://my-controller-host" - control_plane_client = setup_openapi_client(ApiClient, ManageIndexesApi, config=config, openapi_config=openapi_config, pool_threads=2) + control_plane_client = setup_openapi_client( + ApiClient, ManageIndexesApi, 
config=config, openapi_config=openapi_config, pool_threads=2 + ) user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+ \(urllib3\:\d+\.\d+\.\d+\)") assert re.match(user_agent_regex, control_plane_client.api_client.user_agent) - assert re.match(user_agent_regex, control_plane_client.api_client.default_headers['User-Agent']) + assert re.match(user_agent_regex, control_plane_client.api_client.default_headers["User-Agent"]) def test_setup_openapi_client_with_api_version(self): config = ConfigBuilder.build( @@ -27,60 +27,61 @@ def test_setup_openapi_client_with_api_version(self): openapi_config = ConfigBuilder.build_openapi_config(config) assert openapi_config.host == "https://my-controller-host" - control_plane_client = setup_openapi_client(ApiClient, ManageIndexesApi, config=config, openapi_config=openapi_config, pool_threads=2, api_version="2024-04") + control_plane_client = setup_openapi_client( + ApiClient, + ManageIndexesApi, + config=config, + openapi_config=openapi_config, + pool_threads=2, + api_version="2024-04", + ) user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+ \(urllib3\:\d+\.\d+\.\d+\)") assert re.match(user_agent_regex, control_plane_client.api_client.user_agent) - assert re.match(user_agent_regex, control_plane_client.api_client.default_headers['User-Agent']) - assert control_plane_client.api_client.default_headers['X-Pinecone-API-Version'] == "2024-04" + assert re.match(user_agent_regex, control_plane_client.api_client.default_headers["User-Agent"]) + assert control_plane_client.api_client.default_headers["X-Pinecone-API-Version"] == "2024-04" -class TestBuildPluginSetupClient(): - @pytest.mark.parametrize("plugin_api_version,plugin_host", [ - (None, None), - ("2024-07", "https://my-plugin-host") - ]) +class TestBuildPluginSetupClient: + @pytest.mark.parametrize("plugin_api_version,plugin_host", [(None, None), ("2024-07", "https://my-plugin-host")]) def test_setup_openapi_client_with_host_override(self, plugin_api_version, 
plugin_host): # These configurations represent the configurations that the core sdk # (e.g. Pinecone class) will have built prior to invoking the plugin setup. - # In real usage, this takes place during the Pinecone class initialization + # In real usage, this takes place during the Pinecone class initialization # and pulls together configuration from all sources (kwargs and env vars). - # It reflects a merging of the user's configuration and the defaults set + # It reflects a merging of the user's configuration and the defaults set # by the sdk. config = ConfigBuilder.build( api_key="my-api-key", host="https://api.pinecone.io", source_tag="my_source_tag", proxy_url="http://my-proxy.com", - ssl_ca_certs="path/to/bundle.pem" + ssl_ca_certs="path/to/bundle.pem", ) openapi_config = ConfigBuilder.build_openapi_config(config) # The core sdk (e.g. Pinecone class) will be responsible for invoking the # build_plugin_setup_client method before passing the result to the plugin - # install method. This is + # install method. This is # somewhat like currying the openapi setup function, because we want some - # information to be controled by the core sdk (e.g. the user-agent string, - # proxy settings, etc) while allowing the plugin to pass the parts of the - # configuration that are relevant to it such as api version, base url if + # information to be controled by the core sdk (e.g. the user-agent string, + # proxy settings, etc) while allowing the plugin to pass the parts of the + # configuration that are relevant to it such as api version, base url if # served from somewhere besides api.pinecone.io, etc. 
client_builder = build_plugin_setup_client(config=config, openapi_config=openapi_config, pool_threads=2) # The plugin machinery in pinecone_plugin_interface will be the one to call - # this client_builder function using classes and other config it discovers inside the + # this client_builder function using classes and other config it discovers inside the # pinecone_plugin namespace package. Putting plugin configuration and references # to the implementation classes into a spot where the pinecone_plugin_interface # can find them is the responsibility of the plugin developer. # - # Passing ManagedIndexesApi and ApiClient here are just a standin for testing - # purposes; in a real plugin, the class would be something else related - # to a new feature, but to test that this setup works I just need a FooApi + # Passing ManagedIndexesApi and ApiClient here are just a standin for testing + # purposes; in a real plugin, the class would be something else related + # to a new feature, but to test that this setup works I just need a FooApi # class generated off the openapi spec. - plugin_api=ManageIndexesApi + plugin_api = ManageIndexesApi plugin_client = client_builder( - api_client_klass=ApiClient, - api_klass=plugin_api, - api_version=plugin_api_version, - host=plugin_host + api_client_klass=ApiClient, api_klass=plugin_api, api_version=plugin_api_version, host=plugin_host ) # Returned client is an instance of the input class @@ -89,21 +90,21 @@ def test_setup_openapi_client_with_host_override(self, plugin_api_version, plugi # We want requests from plugins to have a user-agent matching the host SDK. 
user_agent_regex = re.compile(r"python-client-\d+\.\d+\.\d+ \(urllib3\:\d+\.\d+\.\d+\)") assert re.match(user_agent_regex, plugin_client.api_client.user_agent) - assert re.match(user_agent_regex, plugin_client.api_client.default_headers['User-Agent']) + assert re.match(user_agent_regex, plugin_client.api_client.default_headers["User-Agent"]) # User agent still contains the source tag that was set in the sdk config - assert 'my_source_tag' in plugin_client.api_client.default_headers['User-Agent'] + assert "my_source_tag" in plugin_client.api_client.default_headers["User-Agent"] # Proxy settings should be passed from the core sdk to the plugin client assert plugin_client.api_client.configuration.proxy == "http://my-proxy.com" assert plugin_client.api_client.configuration.ssl_ca_cert == "path/to/bundle.pem" # Plugins need to be able to pass their own API version (optionally) - assert plugin_client.api_client.default_headers.get('X-Pinecone-API-Version') == plugin_api_version + assert plugin_client.api_client.default_headers.get("X-Pinecone-API-Version") == plugin_api_version # Plugins need to be able to override the host (optionally) if plugin_host: - assert plugin_client.api_client.configuration._base_path == plugin_host + assert plugin_client.api_client.configuration._base_path == plugin_host else: # When plugin does not set a host, it should default to the host set in the core sdk assert plugin_client.api_client.configuration._base_path == "https://api.pinecone.io" diff --git a/tests/unit/utils/test_user_agent.py b/tests/unit/utils/test_user_agent.py index 6886c173..9f17d680 100644 --- a/tests/unit/utils/test_user_agent.py +++ b/tests/unit/utils/test_user_agent.py @@ -2,7 +2,8 @@ from pinecone.utils.user_agent import get_user_agent, get_user_agent_grpc from pinecone.config import ConfigBuilder -class TestUserAgent(): + +class TestUserAgent: def test_user_agent(self): config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host") useragent = 
get_user_agent(config) @@ -10,30 +11,42 @@ def test_user_agent(self): assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None def test_user_agent_with_source_tag(self): - config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host", source_tag="my_source_tag") + config = ConfigBuilder.build( + api_key="my-api-key", host="https://my-controller-host", source_tag="my_source_tag" + ) useragent = get_user_agent(config) assert re.search(r"python-client-\d+\.\d+\.\d+", useragent) is not None assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None assert re.search(r"source_tag=my_source_tag", useragent) is not None def test_source_tag_is_normalized(self): - config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host", source_tag="my source tag!!!!") + config = ConfigBuilder.build( + api_key="my-api-key", host="https://my-controller-host", source_tag="my source tag!!!!" + ) useragent = get_user_agent(config) assert re.search(r"source_tag=my_source_tag", useragent) is not None - config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host", source_tag="My Source Tag") + config = ConfigBuilder.build( + api_key="my-api-key", host="https://my-controller-host", source_tag="My Source Tag" + ) useragent = get_user_agent(config) assert re.search(r"source_tag=my_source_tag", useragent) is not None - config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host", source_tag=" My Source Tag 123 ") + config = ConfigBuilder.build( + api_key="my-api-key", host="https://my-controller-host", source_tag=" My Source Tag 123 " + ) useragent = get_user_agent(config) assert re.search(r"source_tag=my_source_tag_123", useragent) is not None - config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host", source_tag=" My Source Tag 123 #### !! 
") + config = ConfigBuilder.build( + api_key="my-api-key", host="https://my-controller-host", source_tag=" My Source Tag 123 #### !! " + ) useragent = get_user_agent(config) assert re.search(r"source_tag=my_source_tag_123", useragent) is not None - config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host", source_tag="colon:allowed") + config = ConfigBuilder.build( + api_key="my-api-key", host="https://my-controller-host", source_tag="colon:allowed" + ) useragent = get_user_agent(config) assert re.search(r"source_tag=colon:allowed", useragent) is not None @@ -44,8 +57,10 @@ def test_user_agent_grpc(self): assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None def test_user_agent_grpc_with_source_tag(self): - config = ConfigBuilder.build(api_key="my-api-key", host="https://my-controller-host", source_tag="my_source_tag") + config = ConfigBuilder.build( + api_key="my-api-key", host="https://my-controller-host", source_tag="my_source_tag" + ) useragent = get_user_agent_grpc(config) assert re.search(r"python-client\[grpc\]-\d+\.\d+\.\d+", useragent) is not None assert re.search(r"urllib3:\d+\.\d+\.\d+", useragent) is not None - assert re.search(r"source_tag=my_source_tag", useragent) is not None \ No newline at end of file + assert re.search(r"source_tag=my_source_tag", useragent) is not None diff --git a/tests/unit_grpc/conftest.py b/tests/unit_grpc/conftest.py index bb164195..2ac4cfea 100644 --- a/tests/unit_grpc/conftest.py +++ b/tests/unit_grpc/conftest.py @@ -5,6 +5,7 @@ def vector_dim(): return 8 + @pytest.fixture def vals1(vector_dim): return [0.1] * vector_dim diff --git a/tests/unit_grpc/test_grpc_index_initialization.py b/tests/unit_grpc/test_grpc_index_initialization.py index 59cb2967..6c8a78fd 100644 --- a/tests/unit_grpc/test_grpc_index_initialization.py +++ b/tests/unit_grpc/test_grpc_index_initialization.py @@ -2,11 +2,12 @@ from pinecone.grpc import PineconeGRPC, GRPCClientConfig from pinecone import ConfigBuilder + 
class TestGRPCIndexInitialization: def test_init_with_default_config(self): - pc = PineconeGRPC(api_key='YOUR_API_KEY') - index = pc.Index(name='my-index', host='host') - + pc = PineconeGRPC(api_key="YOUR_API_KEY") + index = pc.Index(name="my-index", host="host") + assert index.grpc_client_config.secure == True assert index.grpc_client_config.timeout == 20 assert index.grpc_client_config.conn_timeout == 1 @@ -17,41 +18,37 @@ def test_init_with_default_config(self): # Default metadata, grpc equivalent to http request headers assert len(index.fixed_metadata) == 3 - assert index.fixed_metadata['api-key'] == 'YOUR_API_KEY' - assert index.fixed_metadata['service-name'] == 'my-index' - assert index.fixed_metadata['client-version'] != None + assert index.fixed_metadata["api-key"] == "YOUR_API_KEY" + assert index.fixed_metadata["service-name"] == "my-index" + assert index.fixed_metadata["client-version"] != None def test_init_with_additional_metadata(self): - pc = PineconeGRPC(api_key='YOUR_API_KEY') - config = GRPCClientConfig(additional_metadata={ - 'debug-header': 'value123', - 'debug-header2': 'value456' - }) - index = pc.Index(name='my-index', host='host', grpc_config=config) + pc = PineconeGRPC(api_key="YOUR_API_KEY") + config = GRPCClientConfig(additional_metadata={"debug-header": "value123", "debug-header2": "value456"}) + index = pc.Index(name="my-index", host="host", grpc_config=config) assert len(index.fixed_metadata) == 5 - assert index.fixed_metadata['api-key'] == 'YOUR_API_KEY' - assert index.fixed_metadata['service-name'] == 'my-index' - assert index.fixed_metadata['client-version'] != None - assert index.fixed_metadata['debug-header'] == 'value123' - assert index.fixed_metadata['debug-header2'] == 'value456' + assert index.fixed_metadata["api-key"] == "YOUR_API_KEY" + assert index.fixed_metadata["service-name"] == "my-index" + assert index.fixed_metadata["client-version"] != None + assert index.fixed_metadata["debug-header"] == "value123" + assert 
index.fixed_metadata["debug-header2"] == "value456" def test_init_with_grpc_config_from_dict(self): - pc = PineconeGRPC(api_key='YOUR_API_KEY') - config = GRPCClientConfig._from_dict({'timeout': 10}) - index = pc.Index(name='my-index', host='host', grpc_config=config) - + pc = PineconeGRPC(api_key="YOUR_API_KEY") + config = GRPCClientConfig._from_dict({"timeout": 10}) + index = pc.Index(name="my-index", host="host", grpc_config=config) + assert index.grpc_client_config.timeout == 10 # Unset fields still get default values assert index.grpc_client_config.reuse_channel == True assert index.grpc_client_config.secure == True - def test_init_with_grpc_config_non_dict(self): - pc = PineconeGRPC(api_key='YOUR_API_KEY') + pc = PineconeGRPC(api_key="YOUR_API_KEY") config = GRPCClientConfig(timeout=10, secure=False) - index = pc.Index(name='my-index', host='host', grpc_config=config) - + index = pc.Index(name="my-index", host="host", grpc_config=config) + assert index.grpc_client_config.timeout == 10 assert index.grpc_client_config.secure == False @@ -60,15 +57,15 @@ def test_init_with_grpc_config_non_dict(self): assert index.grpc_client_config.conn_timeout == 1 def test_config_passed_when_target_by_name(self): - pc = PineconeGRPC(api_key='YOUR_API_KEY') + pc = PineconeGRPC(api_key="YOUR_API_KEY") # Set this state in the host store to skip network call # to find host for name - pc.index_host_store.set_host(pc.config, 'my-index', 'myhost') + pc.index_host_store.set_host(pc.config, "my-index", "myhost") config = GRPCClientConfig(timeout=10, secure=False) - index = pc.Index(name='my-index', grpc_config=config) - + index = pc.Index(name="my-index", grpc_config=config) + assert index.grpc_client_config.timeout == 10 assert index.grpc_client_config.secure == False @@ -77,10 +74,10 @@ def test_config_passed_when_target_by_name(self): assert index.grpc_client_config.conn_timeout == 1 def test_config_passed_when_target_by_host(self): - pc = PineconeGRPC(api_key='YOUR_API_KEY') + pc = 
PineconeGRPC(api_key="YOUR_API_KEY") config = GRPCClientConfig(timeout=5, secure=True) - index = pc.Index(host='myhost', grpc_config=config) - + index = pc.Index(host="myhost", grpc_config=config) + assert index.grpc_client_config.timeout == 5 assert index.grpc_client_config.secure == True @@ -89,6 +86,6 @@ def test_config_passed_when_target_by_host(self): assert index.grpc_client_config.conn_timeout == 1 def test_config_passes_source_tag_when_set(self): - pc = PineconeGRPC(api_key='YOUR_API_KEY', source_tag='my_source_tag') - index = pc.Index(name='my-index', host='host') + pc = PineconeGRPC(api_key="YOUR_API_KEY", source_tag="my_source_tag") + index = pc.Index(name="my-index", host="host") assert re.search(r"source_tag=my_source_tag", pc.index_api.api_client.user_agent) is not None diff --git a/tests/unit_grpc/test_grpc_index_query.py b/tests/unit_grpc/test_grpc_index_query.py index ead469a9..561b7331 100644 --- a/tests/unit_grpc/test_grpc_index_query.py +++ b/tests/unit_grpc/test_grpc_index_query.py @@ -11,6 +11,7 @@ ) from pinecone.grpc.utils import dict_to_proto_struct + class TestGrpcIndexQuery: def setup_method(self): self.config = Config(api_key="test-api-key", host="foo") diff --git a/tests/unit_grpc/test_sparse_values_factory.py b/tests/unit_grpc/test_sparse_values_factory.py index 5bd6a50b..2938613a 100644 --- a/tests/unit_grpc/test_sparse_values_factory.py +++ b/tests/unit_grpc/test_sparse_values_factory.py @@ -8,6 +8,7 @@ from pinecone.grpc.sparse_values_factory import SparseValuesFactory + class TestSparseValuesFactory: def test_build_when_None(self): assert SparseValuesFactory.build(None) == None @@ -29,60 +30,75 @@ def test_build_when_passed_NonGRPCSparseValues(self): expected = GRPCSparseValues(indices=[0, 2], values=[0.1, 0.3]) assert actual == expected - @pytest.mark.parametrize('input', [ - {'indices': [2], 'values': [0.3]}, - {'indices': [88, 102], 'values': [-0.1, 0.3]}, - {'indices': [0, 2, 4], 'values': [0.1, 0.3, 0.5]}, - {'indices': [0, 2, 
4, 6], 'values': [0.1, 0.3, 0.5, 0.7]}, - ]) + @pytest.mark.parametrize( + "input", + [ + {"indices": [2], "values": [0.3]}, + {"indices": [88, 102], "values": [-0.1, 0.3]}, + {"indices": [0, 2, 4], "values": [0.1, 0.3, 0.5]}, + {"indices": [0, 2, 4, 6], "values": [0.1, 0.3, 0.5, 0.7]}, + ], + ) def test_build_when_valid_dictionary(self, input): actual = SparseValuesFactory.build(input) - expected = GRPCSparseValues(indices=input['indices'], values=input['values']) + expected = GRPCSparseValues(indices=input["indices"], values=input["values"]) assert actual == expected - @pytest.mark.parametrize('input', [ - {'indices': np.array([0, 2]), 'values': [0.1, 0.3]}, - {'indices': [0, 2], 'values': np.array([0.1, 0.3])}, - {'indices': np.array([0, 2]), 'values': np.array([0.1, 0.3])}, - {'indices': pd.array([0, 2]), 'values': [0.1, 0.3]}, - {'indices': [0, 2], 'values': pd.array([0.1, 0.3])}, - {'indices': pd.array([0, 2]), 'values': pd.array([0.1, 0.3])}, - {'indices': np.array([0, 2]), 'values': pd.array([0.1, 0.3])}, - {'indices': pd.array([0, 2]), 'values': np.array([0.1, 0.3])}, - ]) + @pytest.mark.parametrize( + "input", + [ + {"indices": np.array([0, 2]), "values": [0.1, 0.3]}, + {"indices": [0, 2], "values": np.array([0.1, 0.3])}, + {"indices": np.array([0, 2]), "values": np.array([0.1, 0.3])}, + {"indices": pd.array([0, 2]), "values": [0.1, 0.3]}, + {"indices": [0, 2], "values": pd.array([0.1, 0.3])}, + {"indices": pd.array([0, 2]), "values": pd.array([0.1, 0.3])}, + {"indices": np.array([0, 2]), "values": pd.array([0.1, 0.3])}, + {"indices": pd.array([0, 2]), "values": np.array([0.1, 0.3])}, + ], + ) def test_build_when_special_data_types(self, input): """ - Test that the factory can handle special data types like + Test that the factory can handle special data types like numpy/pandas integer and float arrays. 
""" actual = SparseValuesFactory.build(input) expected = GRPCSparseValues(indices=[0, 2], values=[0.1, 0.3]) assert actual == expected - @pytest.mark.parametrize('input', [ - {'indices': [2], 'values': [0.3, 0.3]}, - {'indices': [88, 102], 'values': [-0.1]}, - ]) + @pytest.mark.parametrize( + "input", + [ + {"indices": [2], "values": [0.3, 0.3]}, + {"indices": [88, 102], "values": [-0.1]}, + ], + ) def test_build_when_list_sizes_dont_match(self, input): with pytest.raises(ValueError, match="Sparse values indices and values must have the same length"): SparseValuesFactory.build(input) - @pytest.mark.parametrize('input', [ - {'indices': [2.0], 'values': [0.3]}, - {'indices': ['2'], 'values': [0.3]}, - {'indices': np.array([2.0]), 'values': [0.3]}, - {'indices': pd.array([2.0]), 'values': [0.3]}, - ]) + @pytest.mark.parametrize( + "input", + [ + {"indices": [2.0], "values": [0.3]}, + {"indices": ["2"], "values": [0.3]}, + {"indices": np.array([2.0]), "values": [0.3]}, + {"indices": pd.array([2.0]), "values": [0.3]}, + ], + ) def test_build_when_non_integer_indices(self, input): with pytest.raises(ValueError, match="Found unexpected data in column `sparse_values`"): SparseValuesFactory.build(input) - @pytest.mark.parametrize('input', [ - {'indices': [2], 'values': [3]}, - {'indices': [2], 'values': ['3.2']}, - {'indices': [2], 'values': np.array([3])}, - {'indices': [2], 'values': pd.array([3])}, - ]) + @pytest.mark.parametrize( + "input", + [ + {"indices": [2], "values": [3]}, + {"indices": [2], "values": ["3.2"]}, + {"indices": [2], "values": np.array([3])}, + {"indices": [2], "values": pd.array([3])}, + ], + ) def test_build_when_non_float_values(self, input): with pytest.raises(ValueError, match="Found unexpected data in column `sparse_values`"): SparseValuesFactory.build(input) diff --git a/tests/unit_grpc/test_vector_factory_grpc.py b/tests/unit_grpc/test_vector_factory_grpc.py index 954ab919..f85824e9 100644 --- a/tests/unit_grpc/test_vector_factory_grpc.py +++ 
b/tests/unit_grpc/test_vector_factory_grpc.py @@ -37,23 +37,17 @@ def test_build_when_nongrpc_vector_with_sparse_values_it_converts(self): sparse_values=SparseValues(indices=[0, 2], values=[0.1, 0.3]), ) - @pytest.mark.parametrize("values_array", [ - [0.1, 0.2, 0.3], - np.array([0.1, 0.2, 0.3]), - pd.array([0.1, 0.2, 0.3]) - ]) + @pytest.mark.parametrize("values_array", [[0.1, 0.2, 0.3], np.array([0.1, 0.2, 0.3]), pd.array([0.1, 0.2, 0.3])]) def test_build_when_tuple_with_two_values(self, values_array): tup = ("1", values_array) actual = VectorFactoryGRPC.build(tup) expected = Vector(id="1", values=[0.1, 0.2, 0.3], metadata=dict_to_proto_struct({})) assert actual == expected - @pytest.mark.parametrize("vector_tup", [ - ("1", 'not an array'), - ("1", {}), - ("1", 'not an array', {"genre": "comedy"}), - ("1", {}, {"genre": "comedy"}) - ]) + @pytest.mark.parametrize( + "vector_tup", + [("1", "not an array"), ("1", {}), ("1", "not an array", {"genre": "comedy"}), ("1", {}, {"genre": "comedy"})], + ) def test_build_when_tuple_values_must_be_list(self, vector_tup): with pytest.raises( TypeError, @@ -61,11 +55,7 @@ def test_build_when_tuple_values_must_be_list(self, vector_tup): ): VectorFactoryGRPC.build(vector_tup) - @pytest.mark.parametrize("values_array", [ - [0.1, 0.2, 0.3], - np.array([0.1, 0.2, 0.3]), - pd.array([0.1, 0.2, 0.3]) - ]) + @pytest.mark.parametrize("values_array", [[0.1, 0.2, 0.3], np.array([0.1, 0.2, 0.3]), pd.array([0.1, 0.2, 0.3])]) def test_build_when_tuple_with_three_values(self, values_array): tup = ("1", values_array, {"genre": "comedy"}) actual = VectorFactoryGRPC.build(tup) @@ -91,10 +81,7 @@ def test_build_when_tuple_too_short(self): tup = ("1",) VectorFactoryGRPC.build(tup) - @pytest.mark.parametrize("metadata", [ - {"genre": "comedy"}, - dict_to_proto_struct({"genre": "comedy"})] - ) + @pytest.mark.parametrize("metadata", [{"genre": "comedy"}, dict_to_proto_struct({"genre": "comedy"})]) def test_build_when_dict(self, metadata): d = {"id": 
"1", "values": [0.1, 0.2, 0.3], "metadata": metadata} actual = VectorFactoryGRPC.build(d) @@ -140,10 +127,7 @@ def test_build_with_dict_with_sparse_values_object(self, sv_klass): ) assert actual == expected - @pytest.mark.parametrize("input_values", [ - pd.array([0.1, 0.2, 0.3]), - np.array([0.1, 0.2, 0.3]) - ]) + @pytest.mark.parametrize("input_values", [pd.array([0.1, 0.2, 0.3]), np.array([0.1, 0.2, 0.3])]) def test_build_when_dict_with_special_values(self, input_values): d = {"id": "1", "values": input_values, "metadata": {"genre": "comedy"}} actual = VectorFactoryGRPC.build(d) @@ -160,14 +144,17 @@ def test_build_when_dict_excess_keys(self): d = {"id": "1", "values": [0.1, 0.2, 0.3], "metadata": {"genre": "comedy"}, "extra": "field"} VectorFactoryGRPC.build(d) - @pytest.mark.parametrize("sv_indices,sv_values", [ - ([0, 2], [0.1, 0.3]), - (pd.array([0, 2]), [0.1, 0.3]), - ([0, 2], pd.array([0.1, 0.3])), - (pd.array([0, 2]), pd.array([0.1, 0.3])), - (np.array([0, 2]), [0.1, 0.3]), - ([0, 2], np.array([0.1, 0.3])) - ]) + @pytest.mark.parametrize( + "sv_indices,sv_values", + [ + ([0, 2], [0.1, 0.3]), + (pd.array([0, 2]), [0.1, 0.3]), + ([0, 2], pd.array([0.1, 0.3])), + (pd.array([0, 2]), pd.array([0.1, 0.3])), + (np.array([0, 2]), [0.1, 0.3]), + ([0, 2], np.array([0.1, 0.3])), + ], + ) def test_build_when_dict_sparse_values(self, sv_indices, sv_values): d = { "id": "1", @@ -200,12 +187,7 @@ def test_build_when_dict_sparse_values_when_SparseValues(self): ) assert actual == expected - @pytest.mark.parametrize("bogus_sparse_values", [ - 1, - "not an array", - [1, 2], - {} - ]) + @pytest.mark.parametrize("bogus_sparse_values", [1, "not an array", [1, 2], {}]) def test_build_when_dict_sparse_values_errors_when_invalid_sparse_values_values(self, bogus_sparse_values): with pytest.raises(ValueError, match="Found unexpected data in column `sparse_values`"): d = { @@ -216,12 +198,7 @@ def test_build_when_dict_sparse_values_errors_when_invalid_sparse_values_values( } 
VectorFactoryGRPC.build(d) - @pytest.mark.parametrize("bogus_sparse_indices", [ - 1, - "not an array", - [0.1, 0.2], - {} - ]) + @pytest.mark.parametrize("bogus_sparse_indices", [1, "not an array", [0.1, 0.2], {}]) def test_build_when_dict_sparse_values_errors_when_indices_not_valid_list(self, bogus_sparse_indices): with pytest.raises(ValueError, match="Found unexpected data in column `sparse_values`"): d = { @@ -236,48 +213,45 @@ def test_build_when_errors_when_other_type(self): with pytest.raises(ValueError, match="Invalid vector value passed: cannot interpret type"): VectorFactoryGRPC.build(1) - @pytest.mark.parametrize("bogus_sparse_values", [ - 1, - "not a dict", - [1, 2, 3], - [], - ]) + @pytest.mark.parametrize( + "bogus_sparse_values", + [ + 1, + "not a dict", + [1, 2, 3], + [], + ], + ) def test_build_when_invalid_sparse_values_type_in_dict(self, bogus_sparse_values): with pytest.raises(ValueError, match="Column `sparse_values` is expected to be a dictionary"): d = { - 'id': '1', - 'values': [0.1, 0.2, 0.3], - 'metadata': {'genre': 'comedy'}, - 'sparse_values': bogus_sparse_values # not a valid dict + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": bogus_sparse_values, # not a valid dict } VectorFactoryGRPC.build(d) - @pytest.mark.parametrize("bogus_sparse_values", [ - {}, - {'indices': [0, 2]}, - {'values': [0.1, 0.3]}, - ]) + @pytest.mark.parametrize( + "bogus_sparse_values", + [ + {}, + {"indices": [0, 2]}, + {"values": [0.1, 0.3]}, + ], + ) def test_build_when_missing_keys_in_sparse_values_dict(self, bogus_sparse_values): with pytest.raises(ValueError, match="Missing required keys in data in column `sparse_values`"): d = { - 'id': '1', - 'values': [0.1, 0.2, 0.3], - 'metadata': {'genre': 'comedy'}, - 'sparse_values': bogus_sparse_values + "id": "1", + "values": [0.1, 0.2, 0.3], + "metadata": {"genre": "comedy"}, + "sparse_values": bogus_sparse_values, } VectorFactoryGRPC.build(d) def 
test_build_when_sparse_values_is_None(self): - d = { - 'id': '1', - 'values': [0.1, 0.2, 0.3], - 'metadata': {'genre': 'comedy'}, - 'sparse_values': None - } + d = {"id": "1", "values": [0.1, 0.2, 0.3], "metadata": {"genre": "comedy"}, "sparse_values": None} actual = VectorFactoryGRPC.build(d) - expected = Vector( - id='1', - values=[0.1, 0.2, 0.3], - metadata=dict_to_proto_struct({'genre': 'comedy'}) - ) - assert actual == expected \ No newline at end of file + expected = Vector(id="1", values=[0.1, 0.2, 0.3], metadata=dict_to_proto_struct({"genre": "comedy"})) + assert actual == expected