diff --git a/CHANGES b/CHANGES index 89e97a6d..d200d535 100644 --- a/CHANGES +++ b/CHANGES @@ -1,8 +1,27 @@ -master -====== +v. 1.5.1 +======== +Switching to endpoint as the only/primary way of specifying databases: + - DataAPIClient tolerates (deprecated, removal in 2.0) id[/region] in get_database + - (internal-use constructors and utilities only accept API Endpoint) + - AstraDBAdmin is the only place where id[/region] will remain an allowed path in 2.0 + - all tests adapted to reflect this simplification +Admins: resilience against DevOps responses omitting 'keyspace'/'keyspaces' +AstraDBAdmin: added filters and automatic pagination to [async_]list_databases +Consistent handling of deletedCount=-1 from the API (always returned as-is) +Cursors: alignment and rework + - states are an enum; state names reworked for clarity (better cursor `__repr__`) + - _copy and _to_sync methods always return a clean pristine cursor + - "retrieved" property deprecated (removal 2.0). Use `consumed`. + - "collection" property deprecated (removal 2.0). Use `data_source`. +Deprecation of all `set_caller` (=> to be set at constructor-time) (removal in 2.0) +Callers and user-agent string: + - remove RAGStack automatic detection + - Deprecate caller_name/caller_version parameters in favour of "callers" pair list + - (minor) breaking change: passing only one of caller_name/caller_version to _copy/with_options will override the whole one-item callers pair list Repo housekeeping - using ruff for imports and formatting (instead of isort+black) by @cbornet - - add ruff rules UP(pyupgrade) + - add ruff rules UP(pyupgrade) by @cbornet + - remove `cassio` unused dependency v. 
1.5.0 diff --git a/astrapy/admin.py b/astrapy/admin.py index 6ea25ddf..37fb3059 100644 --- a/astrapy/admin.py +++ b/astrapy/admin.py @@ -21,14 +21,14 @@ import warnings from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Sequence import deprecation from astrapy import __version__ from astrapy.api_commander import APICommander from astrapy.authentication import coerce_token_provider, redact_secret -from astrapy.constants import Environment +from astrapy.constants import CallerType, Environment from astrapy.cursors import CommandCursor from astrapy.defaults import ( API_ENDPOINT_TEMPLATE_ENV_MAP, @@ -43,12 +43,14 @@ DEV_OPS_DATABASE_STATUS_MAINTENANCE, DEV_OPS_DATABASE_STATUS_PENDING, DEV_OPS_DATABASE_STATUS_TERMINATING, + DEV_OPS_DEFAULT_DATABASES_PAGE_SIZE, DEV_OPS_KEYSPACE_POLL_INTERVAL_S, DEV_OPS_RESPONSE_HTTP_ACCEPTED, DEV_OPS_RESPONSE_HTTP_CREATED, DEV_OPS_URL_ENV_MAP, DEV_OPS_VERSION_ENV_MAP, NAMESPACE_DEPRECATION_NOTICE_METHOD, + SET_CALLER_DEPRECATION_NOTICE, ) from astrapy.exceptions import ( DataAPIFaultyResponseException, @@ -57,7 +59,11 @@ base_timeout_info, ) from astrapy.info import AdminDatabaseInfo, DatabaseInfo, FindEmbeddingProvidersResult -from astrapy.meta import check_namespace_keyspace, check_update_db_namespace_keyspace +from astrapy.meta import ( + check_caller_parameters, + check_namespace_keyspace, + check_update_db_namespace_keyspace, +) from astrapy.request_tools import HttpMethod if TYPE_CHECKING: @@ -112,7 +118,7 @@ def parse_api_endpoint(api_endpoint: str) -> ParsedAPIEndpoint | None: Parse an API Endpoint into a ParsedAPIEndpoint structure. Args: - api_endpoint: a full API endpoint for the Data Api. + api_endpoint: a full API endpoint for the Data API. Returns: The parsed ParsedAPIEndpoint. If parsing fails, return None. 
@@ -194,6 +200,46 @@ def build_api_endpoint(environment: str, database_id: str, region: str) -> str: ) +def check_id_endpoint_parg_kwargs( + p_arg: str | None, + api_endpoint: str | None, + id: str | None, +) -> tuple[str | None, str | None]: + """ + Utility function helping with the transition to endpoint-first constructors, + with ID being the other possibility. + + It is called with the positional argument, the api_endpoint and id kwargs: it + then verifies legitimacy and returns a normalized (endpoint, id) "either" value. + + Note: this uses the ID regexp to recognize IDs. Crucially, no endpoint regexp + here, since even non-Astra endpoints must be properly processed by this validator. + """ + if p_arg is not None: + if id is not None: + raise ValueError( + "Cannot pass `id` with the id/endpoint positional parameter." + ) + if api_endpoint is not None: + raise ValueError( + "Cannot pass `api_endpoint` with the id/endpoint positional parameter." + ) + if re.match(database_id_matcher, p_arg): + return (None, p_arg) + # p_arg is an endpoint: + return (p_arg, None) + # p_arg is None: + if api_endpoint is None and id is None: + return (None, None) + if id is not None: + if api_endpoint is None: + return (None, id) + else: + raise ValueError("Cannot pass `api_endpoint` and `id` at the same time.") + # endpoint is not None: + return (api_endpoint, None) + + def fetch_raw_database_info_from_id_token( id: str, *, @@ -308,7 +354,7 @@ def fetch_database_info( Fetch database information through the DevOps API. Args: - api_endpoint: a full API endpoint for the Data Api. + api_endpoint: a full API endpoint for the Data API. token: a valid token to access the database information. keyspace: the desired keyspace that will be used in the result. If not specified, the resulting database info will show it as None. 
@@ -335,7 +381,9 @@ def fetch_database_info( max_time_ms=max_time_ms, ) raw_info = gd_response["info"] - if keyspace_param is not None and keyspace_param not in raw_info["keyspaces"]: + if keyspace_param is not None and keyspace_param not in ( + raw_info.get("keyspaces") or [] + ): raise DevOpsAPIException(f"Keyspace {keyspace_param} not found on DB.") else: return DatabaseInfo( @@ -363,7 +411,7 @@ async def async_fetch_database_info( Async version of the function, for use in an asyncio context. Args: - api_endpoint: a full API endpoint for the Data Api. + api_endpoint: a full API endpoint for the Data API. token: a valid token to access the database information. keyspace: the desired keyspace that will be used in the result. If not specified, the resulting database info will show it as None. @@ -390,7 +438,9 @@ async def async_fetch_database_info( max_time_ms=max_time_ms, ) raw_info = gd_response["info"] - if keyspace_param is not None and keyspace_param not in raw_info["keyspaces"]: + if keyspace_param is not None and keyspace_param not in ( + raw_info.get("keyspaces") or [] + ): raise DevOpsAPIException(f"Keyspace {keyspace_param} not found on DB.") else: return DatabaseInfo( @@ -415,8 +465,8 @@ def _recast_as_admin_database_info( info=DatabaseInfo( id=admin_database_info_dict["id"], region=admin_database_info_dict["info"]["region"], - keyspace=admin_database_info_dict["info"]["keyspace"], - namespace=admin_database_info_dict["info"]["keyspace"], + keyspace=admin_database_info_dict["info"].get("keyspace"), + namespace=admin_database_info_dict["info"].get("keyspace"), name=admin_database_info_dict["info"]["name"], environment=environment, raw_info=admin_database_info_dict["info"], @@ -441,82 +491,30 @@ def _recast_as_admin_database_info( ) -def normalize_api_endpoint( - id_or_endpoint: str, - region: str | None, - token: TokenProvider, +def normalize_region_for_id( + database_id: str, + token_str: str | None, environment: str, - max_time_ms: int | None = None, + 
region_param: str | None, + max_time_ms: int | None, ) -> str: - """ - Ensure that a id(+region) / endpoint init signature is normalized into - an api_endpoint string. - - This is an impure function: if necessary, attempt a DevOps API call to - integrate the information (i.e. if a DB ID without region is passed). - - This function is tasked with raising an exception if region is passed along - with an API endpoint (and they do not match). - - Args: - id_or_endpoint: either the Database ID or a full standard endpoint. - region: a string with the database region. - token: a TokenProvider for the possible DevOps request to issue. - environment: one of the Astra DB `astrapy.constants.Environment` values. - max_time_ms: used in case the DevOps API request is necessary. - - Returns: - a normalized API Endpoint string (unless it raises an exception). - """ - _api_endpoint: str - parsed_endpoint = parse_api_endpoint(id_or_endpoint) - if parsed_endpoint is not None: - if region is not None and region != parsed_endpoint.region: - raise ValueError( - "An explicit `region` parameter is provided, which does not match " - "the supplied API endpoint. Please refrain from specifying `region`." 
- ) - _api_endpoint = id_or_endpoint + if region_param: + return region_param else: - # it's a genuine ID - _region: str - if region: - _region = region - else: - logger.info(f"fetching raw database info for {id_or_endpoint}") - this_db_info = fetch_raw_database_info_from_id_token( - id=id_or_endpoint, - token=token.get_token(), - environment=environment, - max_time_ms=max_time_ms, - ) - logger.info(f"finished fetching raw database info for {id_or_endpoint}") - _region = this_db_info["info"]["region"] - _api_endpoint = build_api_endpoint( + logger.info(f"fetching raw database info for {database_id}") + this_db_info = fetch_raw_database_info_from_id_token( + id=database_id, + token=token_str, environment=environment, - database_id=id_or_endpoint, - region=_region, + max_time_ms=max_time_ms, ) - return _api_endpoint.strip("/") - - -def normalize_id_endpoint_parameters(id: str | None, api_endpoint: str | None) -> str: - if id is None: - if api_endpoint is None: - raise ValueError( - "Exactly one of the `id` and `api_endpoint` " - "synonymous parameters must be passed." - ) - else: - return api_endpoint - else: - if api_endpoint is not None: + logger.info(f"finished fetching raw database info for {database_id}") + found_region = this_db_info.get("info", {}).get("region") + if not isinstance(found_region, str): raise ValueError( - "The `id` and `api_endpoint` synonymous parameters " - "cannot be supplied at the same time." + f"Could not determine 'region' from database info: {str(this_db_info)}" ) - else: - return id + return found_region class AstraDBAdmin: @@ -530,9 +528,14 @@ class AstraDBAdmin: `astrapy.authentication.TokenProvider`. environment: a label, whose value is one of Environment.PROD (default), Environment.DEV or Environment.TEST. - caller_name: name of the application, or framework, on behalf of which - the DevOps API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. 
+ callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which DevOps API calls are performed. These end up in + the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the DevOps API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. Removal 2.0. dev_ops_url: in case of custom deployments, this can be used to specify the URL to the DevOps API, such as "https://api.astra.datastax.com". Generally it can be omitted. The environment (prod/dev/...) is @@ -559,11 +562,13 @@ def __init__( token: str | TokenProvider | None = None, *, environment: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, dev_ops_url: str | None = None, dev_ops_api_version: str | None = None, ) -> None: + callers_param = check_caller_parameters(callers, caller_name, caller_version) self.token_provider = coerce_token_provider(token) self.environment = (environment or Environment.PROD).lower() if self.environment not in Environment.astra_db_values: @@ -584,8 +589,7 @@ def __init__( else: self._dev_ops_commander_headers = {} - self.caller_name = caller_name - self.caller_version = caller_version + self.callers = callers_param self._dev_ops_api_commander = self._get_dev_ops_api_commander() def __repr__(self) -> str: @@ -610,8 +614,7 @@ def __eq__(self, other: Any) -> bool: self.environment == other.environment, self.dev_ops_url == other.dev_ops_url, self.dev_ops_url == other.dev_ops_url, - self.caller_name == other.caller_name, - self.caller_version == other.caller_version, + self.callers == other.callers, self._dev_ops_url == other._dev_ops_url, self._dev_ops_api_version == other._dev_ops_api_version, self._dev_ops_api_commander == other._dev_ops_api_commander, @@ 
-630,7 +633,7 @@ def _get_dev_ops_api_commander(self) -> APICommander: api_endpoint=DEV_OPS_URL_ENV_MAP[self.environment], path=dev_ops_base_path, headers=self._dev_ops_commander_headers, - callers=[(self.caller_name, self.caller_version)], + callers=self.callers, dev_ops_api=True, ) return dev_ops_commander @@ -640,16 +643,17 @@ def _copy( *, token: str | TokenProvider | None = None, environment: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, dev_ops_url: str | None = None, dev_ops_api_version: str | None = None, ) -> AstraDBAdmin: + callers_param = check_caller_parameters(callers, caller_name, caller_version) return AstraDBAdmin( token=coerce_token_provider(token) or self.token_provider, environment=environment or self.environment, - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, dev_ops_url=dev_ops_url or self._dev_ops_url, dev_ops_api_version=dev_ops_api_version or self._dev_ops_api_version, ) @@ -658,6 +662,7 @@ def with_options( self, *, token: str | TokenProvider | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> AstraDBAdmin: @@ -668,27 +673,37 @@ def with_options( token: an Access Token to the database. Example: `"AstraCS:xyz..."`. This can be either a literal token string or a subclass of `astrapy.authentication.TokenProvider`. - caller_name: name of the application, or framework, on behalf of which - the Data API and DevOps API calls are performed. This ends up in + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which DevOps API calls are performed. These end up in the request user-agent. - caller_version: version of the caller. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. 
Name of the + application, or framework, on behalf of which the DevOps API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. Returns: a new AstraDBAdmin instance. Example: >>> another_astra_db_admin = my_astra_db_admin.with_options( - ... caller_name="caller_identity", - ... caller_version="1.2.0", + ... callers=[("caller_identity", "1.2.0")], ... ) """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) return self._copy( token=token, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, ) + @deprecation.deprecated( # type: ignore[misc] + deprecated_in="1.5.1", + removed_in="2.0.0", + current_version=__version__, + details=SET_CALLER_DEPRECATION_NOTICE, + ) def set_caller( self, caller_name: str | None = None, @@ -707,25 +722,34 @@ Example: >>> my_astra_db_admin.set_caller( - ... caller_name="the_caller", - ... caller_version="0.1.0", + ... caller_name="the_caller", caller_version="0.1.0", ... ) """ logger.info(f"setting caller to {caller_name}/{caller_version}") - self.caller_name = caller_name - self.caller_version = caller_version + callers_param = check_caller_parameters([], caller_name, caller_version) + self.callers = callers_param self._dev_ops_api_commander = self._get_dev_ops_api_commander() def list_databases( self, *, + include: str | None = None, + provider: str | None = None, + page_size: int | None = None, max_time_ms: int | None = None, ) -> CommandCursor[AdminDatabaseInfo]: """ Get the list of databases, as obtained with a request to the DevOps API. Args: + include: a filter on what databases are to be returned. As per + DevOps API, defaults to "nonterminated". Pass "all" to include + the already terminated databases. + provider: a filter on the cloud provider for the databases. + As per DevOps API, defaults to "ALL". Pass e.g. "AWS" to + restrict the results. 
+ page_size: number of results per page from the DevOps API. Optional. max_time_ms: a timeout, in milliseconds, for the API request. Returns: @@ -746,30 +770,76 @@ def list_databases( """ logger.info("getting databases (DevOps API)") - gd_list_response = self._dev_ops_api_commander.request( - http_method=HttpMethod.GET, timeout_info=base_timeout_info(max_time_ms) + request_params_0 = { + k: v + for k, v in { + "include": include, + "provider": provider, + "limit": page_size or DEV_OPS_DEFAULT_DATABASES_PAGE_SIZE, + }.items() + if v is not None + } + responses: list[dict[str, Any]] = [] + logger.info("request 0, getting databases (DevOps API)") + response_0 = self._dev_ops_api_commander.request( + http_method=HttpMethod.GET, + request_params=request_params_0, + timeout_info=base_timeout_info(max_time_ms), ) - logger.info("finished getting databases (DevOps API)") - if not isinstance(gd_list_response, list): + if not isinstance(response_0, list): raise DevOpsAPIException( "Faulty response from get-databases DevOps API command.", ) - else: - # we know this is a list of dicts which need a little adjusting - return CommandCursor( - address=self._dev_ops_api_commander.full_path, - items=[ - _recast_as_admin_database_info( - db_dict, - environment=self.environment, - ) - for db_dict in gd_list_response - ], + logger.info("finished request 0, getting databases (DevOps API)") + responses += [response_0] + while len(responses[-1]) >= request_params_0["limit"]: + if "id" not in responses[-1][-1]: + raise DevOpsAPIException( + "Faulty response from get-databases DevOps API command.", + ) + last_received_db_id = responses[-1][-1]["id"] + request_params_n = { + **request_params_0, + **{"starting_after": last_received_db_id}, + } + logger.info( + "request %s, getting databases (DevOps API)", + len(responses), + ) + response_n = self._dev_ops_api_commander.request( + http_method=HttpMethod.GET, + request_params=request_params_n, + timeout_info=base_timeout_info(max_time_ms), + ) + 
logger.info( + "finished request %s, getting databases (DevOps API)", + len(responses), ) + if not isinstance(response_n, list): + raise DevOpsAPIException( + "Faulty response from get-databases DevOps API command.", + ) + responses += [response_n] + + logger.info("finished getting databases (DevOps API)") + return CommandCursor( + address=self._dev_ops_api_commander.full_path, + items=[ + _recast_as_admin_database_info( + db_dict, + environment=self.environment, + ) + for response in responses + for db_dict in response + ], + ) async def async_list_databases( self, *, + include: str | None = None, + provider: str | None = None, + page_size: int | None = None, max_time_ms: int | None = None, ) -> CommandCursor[AdminDatabaseInfo]: """ @@ -777,6 +847,13 @@ async def async_list_databases( Async version of the method, for use in an asyncio context. Args: + include: a filter on what databases are to be returned. As per + DevOps API, defaults to "nonterminated". Pass "all" to include + the already terminated databases. + provider: a filter on the cloud provider for the databases. + As per DevOps API, defaults to "ALL". Pass e.g. "AWS" to + restrict the results. + page_size: number of results per page from the DevOps API. Optional. max_time_ms: a timeout, in milliseconds, for the API request. 
Returns: @@ -798,26 +875,69 @@ async def async_list_databases( """ logger.info("getting databases (DevOps API), async") - gd_list_response = await self._dev_ops_api_commander.async_request( - http_method=HttpMethod.GET, timeout_info=base_timeout_info(max_time_ms) + request_params_0 = { + k: v + for k, v in { + "include": include, + "provider": provider, + "limit": page_size or DEV_OPS_DEFAULT_DATABASES_PAGE_SIZE, + }.items() + if v is not None + } + responses: list[dict[str, Any]] = [] + logger.info("request 0, getting databases (DevOps API), async") + response_0 = await self._dev_ops_api_commander.async_request( + http_method=HttpMethod.GET, + request_params=request_params_0, + timeout_info=base_timeout_info(max_time_ms), ) - logger.info("finished getting databases (DevOps API), async") - if not isinstance(gd_list_response, list): + if not isinstance(response_0, list): raise DevOpsAPIException( "Faulty response from get-databases DevOps API command.", ) - else: - # we know this is a list of dicts which need a little adjusting - return CommandCursor( - address=self._dev_ops_api_commander.full_path, - items=[ - _recast_as_admin_database_info( - db_dict, - environment=self.environment, - ) - for db_dict in gd_list_response - ], + logger.info("finished request 0, getting databases (DevOps API), async") + responses += [response_0] + while len(responses[-1]) >= request_params_0["limit"]: + if "id" not in responses[-1][-1]: + raise DevOpsAPIException( + "Faulty response from get-databases DevOps API command.", + ) + last_received_db_id = responses[-1][-1]["id"] + request_params_n = { + **request_params_0, + **{"starting_after": last_received_db_id}, + } + logger.info( + "request %s, getting databases (DevOps API)", + len(responses), + ) + response_n = await self._dev_ops_api_commander.async_request( + http_method=HttpMethod.GET, + request_params=request_params_n, + timeout_info=base_timeout_info(max_time_ms), ) + logger.info( + "finished request %s, getting databases 
(DevOps API), async", + len(responses), + ) + if not isinstance(response_n, list): + raise DevOpsAPIException( + "Faulty response from get-databases DevOps API command.", + ) + responses += [response_n] + + logger.info("finished getting databases (DevOps API), async") + return CommandCursor( + address=self._dev_ops_api_commander.full_path, + items=[ + _recast_as_admin_database_info( + db_dict, + environment=self.environment, + ) + for response in responses + for db_dict in response + ], + ) def database_info( self, id: str, *, max_time_ms: int | None = None @@ -999,8 +1119,11 @@ def create_database( f"{name}/({cloud_provider}, {region}) (DevOps API)" ) return AstraDBDatabaseAdmin.from_astra_db_admin( - id=new_database_id, - region=region, + api_endpoint=build_api_endpoint( + environment=self.environment, + database_id=new_database_id, + region=region, + ), astra_db_admin=self, ) @@ -1117,8 +1240,11 @@ async def async_create_database( f"{name}/({cloud_provider}, {region}) (DevOps API), async" ) return AstraDBDatabaseAdmin.from_astra_db_admin( - id=new_database_id, - region=region, + api_endpoint=build_api_endpoint( + environment=self.environment, + database_id=new_database_id, + region=region, + ), astra_db_admin=self, ) @@ -1287,9 +1413,10 @@ async def async_drop_database( def get_database_admin( self, - id: str | None = None, + api_endpoint_or_id: str | None = None, *, api_endpoint: str | None = None, + id: str | None = None, region: str | None = None, max_time_ms: int | None = None, ) -> AstraDBDatabaseAdmin: @@ -1297,15 +1424,18 @@ def get_database_admin( Create an AstraDBDatabaseAdmin object for admin work within a certain database. Args: - id: the target database ID (e.g. `01234567-89ab-cdef-0123-456789abcdef`) - or the corresponding API Endpoint + api_endpoint_or_id: positional parameter that can stand for both + `api_endpoint` and `id`. Passing them together is an error. + api_endpoint: the API Endpoint for the target database (e.g. 
`https://-.apps.astra.datastax.com`). - api_endpoint: a named alias for the `id` first (positional) parameter, - with the same meaning. It cannot be passed together with `id`. + The database must exist already for the resulting object + to be effectively used; in other words, this invocation + does not create the database, just the object instance. + id: the target database ID. This is alternative to using the API Endpoint. region: the region to use for connecting to the database. The - database must be located in that region. - The region cannot be specified when the API endoint is used as `id`. - Note that if this parameter is not passed, and cannot be inferred + database must be located in that region. This parameter can be used + only if the database is specified by its ID (instead of API Endpoint). + If this parameter is not passed, and cannot be inferred from the API endpoint, an additional DevOps API request is made to determine the default region and use it subsequently. max_time_ms: a timeout, in milliseconds, for the DevOps API @@ -1329,23 +1459,51 @@ def get_database_admin( `create_database` method. """ - _id_or_endpoint = normalize_id_endpoint_parameters(id, api_endpoint) - - return AstraDBDatabaseAdmin.from_astra_db_admin( - id=_id_or_endpoint, - region=region, - astra_db_admin=self, - max_time_ms=max_time_ms, + _api_endpoint_p, _id_p = check_id_endpoint_parg_kwargs( + p_arg=api_endpoint_or_id, api_endpoint=api_endpoint, id=id ) + # handle the "endpoint passed as id" case first: + if _api_endpoint_p is not None: + if region is not None: + raise ValueError( + "Parameter `region` not supported with an API endpoint." 
+ ) + # in this case max_time_ms is ignored (no calls take place) + return AstraDBDatabaseAdmin.from_astra_db_admin( + api_endpoint=_api_endpoint_p, + astra_db_admin=self, + max_time_ms=max_time_ms, + ) + else: + if _id_p is None: + raise ValueError("Either `api_endpoint` or `id` must be supplied.") + + _region = normalize_region_for_id( + database_id=_id_p, + token_str=self.token_provider.get_token(), + environment=self.environment, + region_param=region, + max_time_ms=max_time_ms, + ) + return AstraDBDatabaseAdmin.from_astra_db_admin( + api_endpoint=build_api_endpoint( + environment=self.environment, + database_id=_id_p, + region=_region, + ), + astra_db_admin=self, + max_time_ms=max_time_ms, + ) def get_database( self, - id: str | None = None, + api_endpoint_or_id: str | None = None, *, api_endpoint: str | None = None, token: str | TokenProvider | None = None, keyspace: str | None = None, namespace: str | None = None, + id: str | None = None, region: str | None = None, api_path: str | None = None, api_version: str | None = None, @@ -1356,24 +1514,26 @@ def get_database( when doing data-level work (such as creating/managing collections). Args: - id: the target database ID (e.g. `01234567-89ab-cdef-0123-456789abcdef`) - or the corresponding API Endpoint + api_endpoint_or_id: positional parameter that can stand for both + `api_endpoint` and `id`. Passing them together is an error. + api_endpoint: the API Endpoint for the target database (e.g. `https://-.apps.astra.datastax.com`). - api_endpoint: a named alias for the `id` first (positional) parameter, - with the same meaning. It cannot be passed together with `id`. + The database must exist already for the resulting object + to be effectively used; in other words, this invocation + does not create the database, just the object instance. token: if supplied, is passed to the Database instead of the one set for this object. 
This can be either a literal token string or a subclass of `astrapy.authentication.TokenProvider`. keyspace: used to specify a certain keyspace the resulting - Database will primarily work on. If not specified, similar - as for `region`, an additional DevOps API call reveals - the default keyspace for the target database. + Database will primarily work on. If not specified, an additional + DevOps API call reveals the default keyspace for the target database. namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. + id: the target database ID. This is alternative to using the API Endpoint. region: the region to use for connecting to the database. The - database must be located in that region. - The region cannot be specified when the API endoint is used as `id`. - Note that if this parameter is not passed, and cannot be inferred + database must be located in that region. This parameter can be used + only if the database is specified by its ID (instead of API Endpoint). + If this parameter is not passed, and cannot be inferred from the API endpoint, an additional DevOps API request is made to determine the default region and use it subsequently. api_path: path to append to the API Endpoint. In typical usage, this @@ -1388,18 +1548,16 @@ def get_database( Example: >>> my_db = my_astra_db_admin.get_database( - ... "01234567-...", - ... region="us-east1", + ... "https://-.apps.astra.datastax.com", + ... keyspace="my_prod_keyspace", ... ) >>> coll = my_db.create_collection("movies", dimension=2) >>> my_coll.insert_one({"title": "The Title", "$vector": [0.3, 0.4]}) - - Note: - This method does not perform any admin-level operation through - the DevOps API. For actual creation of a database, see the - `create_database` method of class AstraDBAdmin. 
""" + _api_endpoint_p, _id_p = check_id_endpoint_parg_kwargs( + p_arg=api_endpoint_or_id, api_endpoint=api_endpoint, id=id + ) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -1408,52 +1566,74 @@ def get_database( # lazy importing here to avoid circular dependency from astrapy import Database - _id_or_endpoint = normalize_id_endpoint_parameters(id, api_endpoint) - _token = coerce_token_provider(token) or self.token_provider - - normalized_api_endpoint = normalize_api_endpoint( - id_or_endpoint=_id_or_endpoint, - region=region, - token=_token, - environment=self.environment, - max_time_ms=max_time_ms, - ) - _keyspace: str | None - if keyspace_param: - _keyspace = keyspace_param + # handle the "endpoint passed as id" case first: + if _api_endpoint_p is not None: + if region is not None: + raise ValueError( + "Parameter `region` not supported with an API endpoint." + ) + if keyspace_param: + _keyspace = keyspace_param + else: + parsed_api_endpoint = parse_api_endpoint(_api_endpoint_p) + if parsed_api_endpoint is None: + msg = api_endpoint_parsing_error_message(_api_endpoint_p) + raise ValueError(msg) + _keyspace = self.database_info( + parsed_api_endpoint.database_id, + max_time_ms=max_time_ms, + ).info.keyspace + return Database( + api_endpoint=_api_endpoint_p, + token=_token, + keyspace=_keyspace, + callers=self.callers, + environment=self.environment, + api_path=api_path, + api_version=api_version, + ) else: - parsed_api_endpoint = parse_api_endpoint(normalized_api_endpoint) - if parsed_api_endpoint is None: - msg = api_endpoint_parsing_error_message(normalized_api_endpoint) - raise ValueError(msg) - - this_db_info = self.database_info( - parsed_api_endpoint.database_id, + # the case where an ID is passed: + if _id_p is None: + raise ValueError("Either `api_endpoint` or `id` must be supplied.") + _region = normalize_region_for_id( + database_id=_id_p, + token_str=self.token_provider.get_token(), + environment=self.environment, 
+ region_param=region, max_time_ms=max_time_ms, ) - _keyspace = this_db_info.info.keyspace - - return Database( - api_endpoint=normalized_api_endpoint, - token=_token, - keyspace=_keyspace, - caller_name=self.caller_name, - caller_version=self.caller_version, - environment=self.environment, - api_path=api_path, - api_version=api_version, - ) + if keyspace_param: + _keyspace = keyspace_param + else: + _keyspace = self.database_info( + _id_p, max_time_ms=max_time_ms + ).info.keyspace + return Database( + api_endpoint=build_api_endpoint( + environment=self.environment, + database_id=_id_p, + region=_region, + ), + token=_token, + keyspace=_keyspace, + callers=self.callers, + environment=self.environment, + api_path=api_path, + api_version=api_version, + ) def get_async_database( self, - id: str | None = None, + api_endpoint_or_id: str | None = None, *, api_endpoint: str | None = None, token: str | TokenProvider | None = None, keyspace: str | None = None, namespace: str | None = None, + id: str | None = None, region: str | None = None, api_path: str | None = None, api_version: str | None = None, @@ -1462,8 +1642,60 @@ def get_async_database( Create an AsyncDatabase instance for a specific database, to be used when doing data-level work (such as creating/managing collections). - This method has identical behavior and signature as the sync - counterpart `get_database`: please see that one for more details. + Args: + api_endpoint_or_id: positional parameter that can stand for both + `api_endpoint` and `id`. Passing them together is an error. + api_endpoint: the API Endpoint for the target database + (e.g. `https://-.apps.astra.datastax.com`). + The database must exist already for the resulting object + to be effectively used; in other words, this invocation + does not create the database, just the object instance. + token: if supplied, is passed to the Database instead of + the one set for this object. 
+ This can be either a literal token string or a subclass of + `astrapy.authentication.TokenProvider`. + keyspace: used to specify a certain keyspace the resulting + AsyncDatabase will primarily work on. If not specified, an additional + DevOps API call reveals the default keyspace for the target database. + namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. + id: the target database ID. This is alternative to using the API Endpoint. + region: the region to use for connecting to the database. The + database must be located in that region. This parameter can be used + only if the database is specified by its ID (instead of API Endpoint). + If this parameter is not passed, and cannot be inferred + from the API endpoint, an additional DevOps API request is made + to determine the default region and use it subsequently. + api_path: path to append to the API Endpoint. In typical usage, this + should be left to its default of "/api/json". + api_version: version specifier to append to the API path. In typical + usage, this should be left to its default of "v1". + max_time_ms: a timeout, in milliseconds, for the DevOps API + HTTP request should it be necessary (see the `region` argument). + + Returns: + An AsyncDatabase object ready to be used. + + Example: + >>> async def create_use_collection( + ... admin: AstraDBAdmin, + ... api_endpoint: str, + ... keyspace: str, + ... ) -> None: + ... my_async_db = admin.get_async_database( + ... api_endpoint, + ... keyspace=keyspace, + ... ) + ... a_coll = await my_async_db.create_collection("movies", dimension=2) + ... await a_coll.insert_one( + ... {"title": "The Title", "$vector": [0.3, 0.4]} + ... ) + ... + >>> asyncio.run(create_use_collection( + ... my_admin, + ... "https://-.apps.astra.datastax.com", + ... "default_keyspace", + ... 
)) + >>> """ keyspace_param = check_namespace_keyspace( @@ -1472,10 +1704,11 @@ def get_async_database( ) return self.get_database( - id=id, + api_endpoint_or_id=api_endpoint_or_id, api_endpoint=api_endpoint, token=token, keyspace=keyspace_param, + id=id, region=region, api_path=api_path, api_version=api_version, @@ -1648,25 +1881,24 @@ class AstraDBDatabaseAdmin(DatabaseAdmin): created by a method call on an AstraDBAdmin. Args: - id: the target database ID (e.g. `01234567-89ab-cdef-0123-456789abcdef`) - or the corresponding API Endpoint + api_endpoint: the API Endpoint for the target database (e.g. `https://-.apps.astra.datastax.com`). - api_endpoint: a named alias for the `id` first (positional) parameter, - with the same meaning. It cannot be passed together with `id`. + The database must exist already for the resulting object + to be effectively used; in other words, this invocation + does not create the database, just the object instance. token: an access token with enough permission to perform admin tasks. This can be either a literal token string or a subclass of `astrapy.authentication.TokenProvider`. - region: the region to use for connecting to the database. The - database must be located in that region. - The region cannot be specified when the API endoint is used as `id`. - Note that if this parameter is not passed, and cannot be inferred - from the API endpoint, an additional DevOps API request is made - to determine the default region and use it subsequently. environment: a label, whose value is one of Environment.PROD (default), Environment.DEV or Environment.TEST. - caller_name: name of the application, or framework, on behalf of which - the DevOps API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which Data API and DevOps API calls are performed. + These end up in the request user-agent. 
+ Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API and + DevOps API calls are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. Removal 2.0. dev_ops_url: in case of custom deployments, this can be used to specify the URL to the DevOps API, such as "https://api.astra.datastax.com". Generally it can be omitted. The environment (prod/dev/...) is @@ -1691,7 +1923,9 @@ class is created by a method such as `Database.get_database_admin()`, Example: >>> from astrapy import DataAPIClient >>> my_client = DataAPIClient("AstraCS:...") - >>> admin_for_my_db = my_client.get_admin().get_database_admin("01234567-...") + >>> admin_for_my_db = my_client.get_admin().get_database_admin( + ... "https://-.apps.astra.datastax.com" + ... ) >>> admin_for_my_db.list_keyspaces() ['default_keyspace', 'staging_keyspace'] >>> admin_for_my_db.info().status @@ -1705,12 +1939,11 @@ class is created by a method such as `Database.get_database_admin()`, def __init__( self, - id: str | None = None, + api_endpoint: str, *, - api_endpoint: str | None = None, token: str | TokenProvider | None = None, - region: str | None = None, environment: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, dev_ops_url: str | None = None, @@ -1723,17 +1956,10 @@ def __init__( # lazy import here to avoid circular dependency from astrapy.database import Database + callers_param = check_caller_parameters(callers, caller_name, caller_version) self.token_provider = coerce_token_provider(token) self.environment = (environment or Environment.PROD).lower() - _id_or_endpoint = normalize_id_endpoint_parameters(id, api_endpoint) - normalized_api_endpoint = normalize_api_endpoint( - id_or_endpoint=_id_or_endpoint, - region=region, - 
token=self.token_provider, - environment=self.environment, - max_time_ms=max_time_ms, - ) - self.api_endpoint = normalized_api_endpoint + self.api_endpoint = api_endpoint parsed_api_endpoint = parse_api_endpoint(self.api_endpoint) if parsed_api_endpoint is None: msg = api_endpoint_parsing_error_message(self.api_endpoint) @@ -1748,8 +1974,7 @@ def __init__( f'`environment="{parsed_api_endpoint.environment}"` ' "to the class constructor." ) - self.caller_name = caller_name - self.caller_version = caller_version + self.callers = callers_param self.api_path = ( api_path if api_path is not None else API_PATH_ENV_MAP[self.environment] ) @@ -1767,8 +1992,7 @@ def __init__( api_endpoint=self.api_endpoint, token=self.token_provider, keyspace=None, - caller_name=self.caller_name, - caller_version=self.caller_version, + callers=self.callers, environment=self.environment, api_path=self.api_path, api_version=self.api_version, @@ -1805,8 +2029,7 @@ def __init__( self._astra_db_admin = AstraDBAdmin( token=self.token_provider, environment=self.environment, - caller_name=self.caller_name, - caller_version=self.caller_version, + callers=self.callers, dev_ops_url=self.dev_ops_url, dev_ops_api_version=self.dev_ops_api_version, ) @@ -1833,8 +2056,7 @@ def __eq__(self, other: Any) -> bool: self.token_provider == other.token_provider, self.environment == other.environment, self.api_endpoint == other.api_endpoint, - self.caller_name == other.caller_name, - self.caller_version == other.caller_version, + self.callers == other.callers, self.api_path == other.api_path, self.api_version == other.api_version, self.spawner_database == other.spawner_database, @@ -1852,7 +2074,7 @@ def _get_api_commander(self) -> APICommander: api_endpoint=self.api_endpoint, path=base_path, headers=self._commander_headers, - callers=[(self.caller_name, self.caller_version)], + callers=self.callers, ) return api_commander @@ -1870,17 +2092,18 @@ def _get_dev_ops_api_commander(self) -> APICommander: 
api_endpoint=self.dev_ops_url, path=dev_ops_base_path, headers=self._dev_ops_commander_headers, - callers=[(self.caller_name, self.caller_version)], + callers=self.callers, dev_ops_api=True, ) return dev_ops_commander def _copy( self, - id: str | None = None, + api_endpoint: str | None = None, + *, token: str | TokenProvider | None = None, - region: str | None = None, environment: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, dev_ops_url: str | None = None, @@ -1888,13 +2111,12 @@ def _copy( api_path: str | None = None, api_version: str | None = None, ) -> AstraDBDatabaseAdmin: + callers_param = check_caller_parameters(callers, caller_name, caller_version) return AstraDBDatabaseAdmin( - id=id or self._database_id, + api_endpoint=api_endpoint or self.api_endpoint, token=coerce_token_provider(token) or self.token_provider, - region=region or self._region, environment=environment or self.environment, - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, dev_ops_url=dev_ops_url or self.dev_ops_url, dev_ops_api_version=dev_ops_api_version or self.dev_ops_api_version, api_path=api_path or self.api_path, @@ -1903,9 +2125,10 @@ def _copy( def with_options( self, + api_endpoint: str | None = None, *, - id: str | None = None, token: str | TokenProvider | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> AstraDBDatabaseAdmin: @@ -1913,31 +2136,46 @@ def with_options( Create a clone of this AstraDBDatabaseAdmin with some changed attributes. Args: - id: e. g. "01234567-89ab-cdef-0123-456789abcdef". + api_endpoint: the API Endpoint for the target database + (e.g. `https://-.apps.astra.datastax.com`). 
+ The database must exist already for the resulting object + to be effectively used; in other words, this invocation + does not create the database, just the object instance. token: an Access Token to the database. Example: `"AstraCS:xyz..."`. This can be either a literal token string or a subclass of `astrapy.authentication.TokenProvider`. - caller_name: name of the application, or framework, on behalf of which - the Data API and DevOps API calls are performed. This ends up in - the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which Data API and DevOps API calls are performed. + These end up in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API and + DevOps API calls are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. Returns: a new AstraDBDatabaseAdmin instance. Example: >>> admin_for_my_other_db = admin_for_my_db.with_options( - ... id="abababab-0101-2323-4545-6789abcdef01", + ... "https://-.apps.astra.datastax.com", ... 
) """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) return self._copy( - id=id, + api_endpoint=api_endpoint, token=token, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, ) + @deprecation.deprecated( # type: ignore[misc] + deprecated_in="1.5.1", + removed_in="2.0.0", + current_version=__version__, + details=SET_CALLER_DEPRECATION_NOTICE, + ) def set_caller( self, caller_name: str | None = None, @@ -1962,8 +2200,8 @@ def set_caller( """ logger.info(f"setting caller to {caller_name}/{caller_version}") - self.caller_name = caller_name or self.caller_name - self.caller_version = caller_version or self.caller_version + callers_param = check_caller_parameters([], caller_name, caller_version) + self.callers = callers_param or self.callers self._api_commander = self._get_api_commander() self._dev_ops_api_commander = self._get_dev_ops_api_commander() @@ -1991,25 +2229,20 @@ def region(self) -> str: @staticmethod def from_astra_db_admin( - id: str, + api_endpoint: str, *, - region: str | None, astra_db_admin: AstraDBAdmin, max_time_ms: int | None = None, ) -> AstraDBDatabaseAdmin: """ - Create an AstraDBDatabaseAdmin from an AstraDBAdmin and a database ID. + Create an AstraDBDatabaseAdmin from an AstraDBAdmin and an API Endpoint. Args: - id: the target database ID (e.g. `01234567-89ab-cdef-0123-456789abcdef`) - or the corresponding API Endpoint + api_endpoint: the API Endpoint for the target database (e.g. `https://-.apps.astra.datastax.com`). - region: the region to use for connecting to the database. The - database must be located in that region. - The region cannot be specified when the API endoint is used as `id`. - Note that if this parameter is not passed, and cannot be inferred - from the API endpoint, an additional DevOps API request is made - to determine the default region and use it subsequently. 
+ The database must exist already for the resulting object + to be effectively used; in other words, this invocation + does not create the database, just the object instance. astra_db_admin: an AstraDBAdmin object that has visibility over the target database. max_time_ms: a timeout, in milliseconds, for the DevOps API @@ -2021,7 +2254,7 @@ def from_astra_db_admin( Example: >>> from astrapy import DataAPIClient, AstraDBDatabaseAdmin >>> admin_for_my_db = AstraDBDatabaseAdmin.from_astra_db_admin( - ... id="01234567-...", + ... "https://-.apps.astra.datastax.com", ... astra_db_admin=DataAPIClient("AstraCS:...").get_admin(), ... ) >>> admin_for_my_db.list_keyspaces() @@ -2036,12 +2269,10 @@ def from_astra_db_admin( """ return AstraDBDatabaseAdmin( - id=id, + api_endpoint=api_endpoint, token=astra_db_admin.token_provider, - region=region, environment=astra_db_admin.environment, - caller_name=astra_db_admin.caller_name, - caller_version=astra_db_admin.caller_version, + callers=astra_db_admin.callers, dev_ops_url=astra_db_admin._dev_ops_url, dev_ops_api_version=astra_db_admin._dev_ops_api_version, max_time_ms=max_time_ms, @@ -2052,6 +2283,7 @@ def from_api_endpoint( api_endpoint: str, *, token: str | TokenProvider | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, dev_ops_url: str | None = None, @@ -2061,13 +2293,23 @@ def from_api_endpoint( Create an AstraDBDatabaseAdmin from an API Endpoint and optionally a token. Args: - api_endpoint: a full API endpoint for the Data Api. + api_endpoint: the API Endpoint for the target database + (e.g. `https://-.apps.astra.datastax.com`). + The database must exist already for the resulting object + to be effectively used; in other words, this invocation + does not create the database, just the object instance. token: an access token with enough permissions to do admin work. 
This can be either a literal token string or a subclass of `astrapy.authentication.TokenProvider`. - caller_name: name of the application, or framework, on behalf of which - the DevOps API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which Data API and DevOps API calls are performed. + These end up in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API and + DevOps API calls are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. dev_ops_url: in case of custom deployments, this can be used to specify the URL to the DevOps API, such as "https://api.astra.datastax.com". Generally it can be omitted. The environment (prod/dev/...) is @@ -2095,15 +2337,14 @@ def from_api_endpoint( see the AstraDBAdmin class. 
""" + callers_param = check_caller_parameters(callers, caller_name, caller_version) parsed_api_endpoint = parse_api_endpoint(api_endpoint) if parsed_api_endpoint: return AstraDBDatabaseAdmin( - id=parsed_api_endpoint.database_id, + api_endpoint=api_endpoint, token=token, - region=parsed_api_endpoint.region, environment=parsed_api_endpoint.environment, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, dev_ops_url=dev_ops_url, dev_ops_api_version=dev_ops_api_version, ) @@ -2212,7 +2453,7 @@ def list_keyspaces(self, *, max_time_ms: int | None = None) -> list[str]: if info.raw_info is None: raise DevOpsAPIException("Could not get the keyspace list.") else: - return info.raw_info.get("info", {}).get("keyspaces", []) # type: ignore[no-any-return] + return info.raw_info.get("info", {}).get("keyspaces") or [] @deprecation.deprecated( # type: ignore[misc] deprecated_in="1.5.0", @@ -2282,7 +2523,7 @@ async def async_list_keyspaces( if info.raw_info is None: raise DevOpsAPIException("Could not get the keyspace list.") else: - return info.raw_info.get("info", {}).get("keyspaces", []) # type: ignore[no-any-return] + return info.raw_info.get("info", {}).get("keyspaces") or [] @deprecation.deprecated( # type: ignore[misc] deprecated_in="1.5.0", @@ -3034,7 +3275,7 @@ def get_database( ) return self._astra_db_admin.get_database( - id=self.api_endpoint, + api_endpoint=self.api_endpoint, token=token, keyspace=keyspace_param, api_path=api_path, @@ -3192,9 +3433,14 @@ class is created by a method such as `Database.get_database_admin()`, usage, this class is created by a method such as `Database.get_database_admin()`, which passes the matching value. Defaults to this portion of the path being absent. - caller_name: name of the application, or framework, on behalf of which - the admin API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. 
applications, or frameworks, + on behalf of which Data API calls are performed. These end up in the + request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. Removal 2.0. spawner_database: either a Database or an AsyncDatabase instance. This represents the database class which spawns this admin object, so that, if required, a keyspace creation can retroactively "use" the new keyspace @@ -3228,6 +3474,7 @@ def __init__( environment: str | None = None, api_path: str | None = None, api_version: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, spawner_database: Database | AsyncDatabase | None = None, @@ -3235,11 +3482,11 @@ def __init__( # lazy import here to avoid circular dependency from astrapy.database import Database + callers_param = check_caller_parameters(callers, caller_name, caller_version) self.environment = (environment or Environment.OTHER).lower() self.token_provider = coerce_token_provider(token) self.api_endpoint = api_endpoint - self.caller_name = caller_name - self.caller_version = caller_version + self.callers = callers_param self.api_path = api_path if api_path is not None else "" self.api_version = api_version if api_version is not None else "" self._commander_headers = { @@ -3256,8 +3503,7 @@ def __init__( api_endpoint=self.api_endpoint, token=self.token_provider, keyspace=None, - caller_name=self.caller_name, - caller_version=self.caller_version, + callers=self.callers, environment=self.environment, api_path=self.api_path, api_version=self.api_version, @@ -3291,7 +3537,7 @@ def _get_api_commander(self) -> APICommander: api_endpoint=self.api_endpoint, path=base_path, 
headers=self._commander_headers, - callers=[(self.caller_name, self.caller_version)], + callers=self.callers, ) return api_commander @@ -3302,17 +3548,18 @@ def _copy( environment: str | None = None, api_path: str | None = None, api_version: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> DataAPIDatabaseAdmin: + callers_param = check_caller_parameters(callers, caller_name, caller_version) return DataAPIDatabaseAdmin( api_endpoint=api_endpoint or self.api_endpoint, token=coerce_token_provider(token) or self.token_provider, environment=environment or self.environment, api_path=api_path or self.api_path, api_version=api_version or self.api_version, - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, ) def with_options( @@ -3320,6 +3567,7 @@ def with_options( *, api_endpoint: str | None = None, token: str | TokenProvider | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> DataAPIDatabaseAdmin: @@ -3332,9 +3580,15 @@ def with_options( token: an access token with enough permission to perform admin tasks. This can be either a literal token string or a subclass of `astrapy.authentication.TokenProvider`. - caller_name: name of the application, or framework, on behalf of which - the admin API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which Data API calls are performed. These end up in the + request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. 
+ caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. Returns: a new DataAPIDatabaseAdmin instance. @@ -3345,13 +3599,19 @@ def with_options( ... ) """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) return self._copy( api_endpoint=api_endpoint, token=token, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, ) + @deprecation.deprecated( # type: ignore[misc] + deprecated_in="1.5.1", + removed_in="2.0.0", + current_version=__version__, + details=SET_CALLER_DEPRECATION_NOTICE, + ) def set_caller( self, caller_name: str | None = None, @@ -3376,8 +3636,8 @@ def set_caller( """ logger.info(f"setting caller to {caller_name}/{caller_version}") - self.caller_name = caller_name - self.caller_version = caller_version + callers_param = check_caller_parameters([], caller_name, caller_version) + self.callers = callers_param self._api_commander = self._get_api_commander() @deprecation.deprecated( # type: ignore[misc] @@ -4094,8 +4354,7 @@ def get_database( api_endpoint=self.api_endpoint, token=coerce_token_provider(token) or self.token_provider, keyspace=keyspace_param, - caller_name=self.caller_name, - caller_version=self.caller_version, + callers=self.callers, environment=self.environment, api_path=api_path, api_version=api_version, diff --git a/astrapy/api_commander.py b/astrapy/api_commander.py index 2c5b2a9a..892c779b 100644 --- a/astrapy/api_commander.py +++ b/astrapy/api_commander.py @@ -17,10 +17,11 @@ import json import logging from types import TracebackType -from typing import TYPE_CHECKING, Any, Dict, Iterable, cast +from typing import TYPE_CHECKING, Any, Dict, Iterable, Sequence, cast import httpx +from astrapy.constants import CallerType from astrapy.defaults import ( DEFAULT_REDACTED_HEADER_NAMES, DEFAULT_REQUEST_TIMEOUT_MS, @@ -46,7 +47,6 @@ from astrapy.user_agents import ( compose_full_user_agent, detect_astrapy_user_agent, - detect_ragstack_user_agent, ) if 
TYPE_CHECKING: @@ -54,7 +54,6 @@ user_agent_astrapy = detect_astrapy_user_agent() -user_agent_ragstack = detect_ragstack_user_agent() logger = logging.getLogger(__name__) @@ -67,7 +66,7 @@ def __init__( api_endpoint: str, path: str, headers: dict[str, str | None] = {}, - callers: list[tuple[str | None, str | None]] = [], + callers: Sequence[CallerType] = [], redacted_header_names: Iterable[str] = DEFAULT_REDACTED_HEADER_NAMES, dev_ops_api: bool = False, ) -> None: @@ -98,7 +97,7 @@ def __init__( self._api_description = "DevOps API" if self.dev_ops_api else "Data API" full_user_agent_string = compose_full_user_agent( - [user_agent_ragstack] + self.callers + [user_agent_astrapy] + list(self.callers) + [user_agent_astrapy] ) self.caller_header: dict[str, str] = ( {"User-Agent": full_user_agent_string} if full_user_agent_string else {} @@ -145,7 +144,7 @@ def _copy( api_endpoint: str | None = None, path: str | None = None, headers: dict[str, str | None] | None = None, - callers: list[tuple[str | None, str | None]] | None = None, + callers: Sequence[CallerType] | None = None, redacted_header_names: list[str] | None = None, dev_ops_api: bool | None = None, ) -> APICommander: @@ -238,6 +237,7 @@ def raw_request( http_method: str = HttpMethod.POST, payload: dict[str, Any] | None = None, additional_path: str | None = None, + request_params: dict[str, Any] = {}, raise_api_errors: bool = True, timeout_info: TimeoutInfoWideType = None, ) -> httpx.Response: @@ -247,7 +247,7 @@ def raw_request( log_httpx_request( http_method=http_method, full_url=request_url, - request_params={}, + request_params=request_params, redacted_request_headers=self._loggable_headers, payload=normalized_payload, ) @@ -258,6 +258,7 @@ def raw_request( method=http_method, url=request_url, content=encoded_payload, + params=request_params, timeout=timeout or DEFAULT_REQUEST_TIMEOUT_MS, headers=self.full_headers, ) @@ -280,6 +281,7 @@ async def async_raw_request( http_method: str = HttpMethod.POST, payload: 
dict[str, Any] | None = None, additional_path: str | None = None, + request_params: dict[str, Any] = {}, raise_api_errors: bool = True, timeout_info: TimeoutInfoWideType = None, ) -> httpx.Response: @@ -289,7 +291,7 @@ async def async_raw_request( log_httpx_request( http_method=http_method, full_url=request_url, - request_params={}, + request_params=request_params, redacted_request_headers=self._loggable_headers, payload=normalized_payload, ) @@ -300,6 +302,7 @@ async def async_raw_request( method=http_method, url=request_url, content=encoded_payload, + params=request_params, timeout=timeout or DEFAULT_REQUEST_TIMEOUT_MS, headers=self.full_headers, ) @@ -322,6 +325,7 @@ def request( http_method: str = HttpMethod.POST, payload: dict[str, Any] | None = None, additional_path: str | None = None, + request_params: dict[str, Any] = {}, raise_api_errors: bool = True, timeout_info: TimeoutInfoWideType = None, ) -> dict[str, Any]: @@ -329,6 +333,7 @@ def request( http_method=http_method, payload=payload, additional_path=additional_path, + request_params=request_params, raise_api_errors=raise_api_errors, timeout_info=timeout_info, ) @@ -342,6 +347,7 @@ async def async_request( http_method: str = HttpMethod.POST, payload: dict[str, Any] | None = None, additional_path: str | None = None, + request_params: dict[str, Any] = {}, raise_api_errors: bool = True, timeout_info: TimeoutInfoWideType = None, ) -> dict[str, Any]: @@ -349,6 +355,7 @@ async def async_request( http_method=http_method, payload=payload, additional_path=additional_path, + request_params=request_params, raise_api_errors=raise_api_errors, timeout_info=timeout_info, ) diff --git a/astrapy/client.py b/astrapy/client.py index 18cdf7f0..6af14b50 100644 --- a/astrapy/client.py +++ b/astrapy/client.py @@ -15,23 +15,28 @@ from __future__ import annotations import logging -import re -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Sequence +import deprecation + +from astrapy import 
__version__ from astrapy.admin import ( - api_endpoint_parser, api_endpoint_parsing_error_message, build_api_endpoint, - database_id_matcher, - fetch_raw_database_info_from_id_token, + check_id_endpoint_parg_kwargs, generic_api_url_parsing_error_message, - normalize_id_endpoint_parameters, + normalize_region_for_id, parse_api_endpoint, parse_generic_api_url, ) from astrapy.authentication import coerce_token_provider, redact_secret -from astrapy.constants import Environment -from astrapy.meta import check_namespace_keyspace +from astrapy.constants import CallerType, Environment +from astrapy.defaults import SET_CALLER_DEPRECATION_NOTICE +from astrapy.meta import ( + check_caller_parameters, + check_deprecated_id_region, + check_namespace_keyspace, +) if TYPE_CHECKING: from astrapy import AsyncDatabase, Database @@ -59,10 +64,15 @@ class DataAPIClient: environment: a string representing the target Data API environment. It can be left unspecified for the default value of `Environment.PROD`; other values include `Environment.OTHER`, `Environment.DSE`. - caller_name: name of the application, or framework, on behalf of which - the Data API and DevOps API calls are performed. This ends up in - the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which Data API and DevOps API calls are performed. + These end up in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API and + DevOps API calls are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. 
Example: >>> from astrapy import DataAPIClient @@ -84,17 +94,18 @@ def __init__( token: str | TokenProvider | None = None, *, environment: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> None: + callers_param = check_caller_parameters(callers, caller_name, caller_version) self.token_provider = coerce_token_provider(token) self.environment = (environment or Environment.PROD).lower() if self.environment not in Environment.values: raise ValueError(f"Unsupported `environment` value: '{self.environment}'.") - self._caller_name = caller_name - self._caller_version = caller_version + self.callers = callers_param def __repr__(self) -> str: token_desc: str | None @@ -116,46 +127,36 @@ def __eq__(self, other: Any) -> bool: [ self.token_provider == other.token_provider, self.environment == other.environment, - self._caller_name == other._caller_name, - self._caller_version == other._caller_version, + self.callers == other.callers, ] ) else: return False def __getitem__(self, database_id_or_api_endpoint: str) -> Database: - if self.environment in Environment.astra_db_values: - if re.match(database_id_matcher, database_id_or_api_endpoint): - return self.get_database(database_id_or_api_endpoint) - elif re.match(api_endpoint_parser, database_id_or_api_endpoint): - return self.get_database_by_api_endpoint(database_id_or_api_endpoint) - else: - raise ValueError( - "The provided input does not look like either a database ID " - f"or an API endpoint ('{database_id_or_api_endpoint}')." 
- ) - else: - return self.get_database_by_api_endpoint(database_id_or_api_endpoint) + return self.get_database(api_endpoint_or_id=database_id_or_api_endpoint) def _copy( self, *, token: str | TokenProvider | None = None, environment: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> DataAPIClient: + callers_param = check_caller_parameters(callers, caller_name, caller_version) return DataAPIClient( token=coerce_token_provider(token) or self.token_provider, environment=environment or self.environment, - caller_name=caller_name or self._caller_name, - caller_version=caller_version or self._caller_version, + callers=callers_param or self.callers, ) def with_options( self, *, token: str | TokenProvider | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> DataAPIClient: @@ -166,27 +167,37 @@ def with_options( token: an Access Token to the database. Example: `"AstraCS:xyz..."`. This can be either a literal token string or a subclass of `astrapy.authentication.TokenProvider`. - caller_name: name of the application, or framework, on behalf of which - the Data API and DevOps API calls are performed. This ends up in - the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which Data API and DevOps API calls are performed. + These end up in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API and + DevOps API calls are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. Returns: a new DataAPIClient instance. Example: >>> another_client = my_client.with_options( - ... 
caller_name="caller_identity", - ... caller_version="1.2.0", + ... callers=[("caller_identity", "1.2.0")], ... ) """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) return self._copy( token=token, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, ) + @deprecation.deprecated( # type: ignore[misc] + deprecated_in="1.5.1", + removed_in="2.0.0", + current_version=__version__, + details=SET_CALLER_DEPRECATION_NOTICE, + ) def set_caller( self, caller_name: str | None = None, @@ -208,17 +219,18 @@ def set_caller( """ logger.info(f"setting caller to {caller_name}/{caller_version}") - self._caller_name = caller_name - self._caller_version = caller_version + callers_param = check_caller_parameters([], caller_name, caller_version) + self.callers = callers_param def get_database( self, - id: str | None = None, + api_endpoint_or_id: str | None = None, *, api_endpoint: str | None = None, token: str | TokenProvider | None = None, keyspace: str | None = None, namespace: str | None = None, + id: str | None = None, region: str | None = None, api_path: str | None = None, api_version: str | None = None, @@ -228,23 +240,25 @@ def get_database( Get a Database object from this client, for doing data-related work. Args: - id: the target database ID or the corresponding API Endpoint. + api_endpoint_or_id: positional parameter that can stand for both + `api_endpoint` and `id`. Passing them together is an error. + api_endpoint: the API Endpoint for the target database + (e.g. `https://-.apps.astra.datastax.com`). The database must exist already for the resulting object to be effectively used; in other words, this invocation does not create the database, just the object instance. Actual admin work can be achieved by using the AstraDBAdmin object. - api_endpoint: a named alias for the `id` first (positional) parameter, - with the same meaning. It cannot be passed together with `id`. 
token: if supplied, is passed to the Database instead of the client token. This can be either a literal token string or a subclass of `astrapy.authentication.TokenProvider`. keyspace: if provided, it is passed to the Database; otherwise the Database class will apply an environment-specific default. namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. + id: the target database ID. This is alternative to using the API Endpoint. region: the region to use for connecting to the database. The - database must be located in that region. - The region cannot be specified when the API endoint is used as `id`. - Note that if this parameter is not passed, and cannot be inferred + database must be located in that region. This parameter can be used + only if the database is specified by its ID (instead of API Endpoint). + If this parameter is not passed, and cannot be inferred from the API endpoint, an additional DevOps API request is made to determine the default region and use it subsequently. api_path: path to append to the API Endpoint. In typical usage, this @@ -273,6 +287,10 @@ def get_database( `create_database` method of class AstraDBAdmin. """ + _api_endpoint_p, _id_p = check_id_endpoint_parg_kwargs( + p_arg=api_endpoint_or_id, api_endpoint=api_endpoint, id=id + ) + check_deprecated_id_region(_id_p, region) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -281,52 +299,42 @@ def get_database( # lazy importing here to avoid circular dependency from astrapy import Database - # id/endpoint parameter normalization - _id_or_endpoint = normalize_id_endpoint_parameters(id, api_endpoint) if self.environment in Environment.astra_db_values: # handle the "endpoint passed as id" case first: - if re.match(api_endpoint_parser, _id_or_endpoint): + if _api_endpoint_p is not None: if region is not None: raise ValueError( - "Parameter `region` not supported when supplying an API endpoint." 
+ "Parameter `region` not supported with an API endpoint." ) # in this case max_time_ms is ignored (no calls take place) return self.get_database_by_api_endpoint( - api_endpoint=_id_or_endpoint, + api_endpoint=_api_endpoint_p, token=token, keyspace=keyspace_param, api_path=api_path, api_version=api_version, ) else: - # handle overrides. Only region is needed (keyspace can stay empty) - if region: - _region = region - else: - logger.info(f"fetching raw database info for {_id_or_endpoint}") - this_db_info = fetch_raw_database_info_from_id_token( - id=_id_or_endpoint, - token=self.token_provider.get_token(), - environment=self.environment, - max_time_ms=max_time_ms, - ) - logger.info( - f"finished fetching raw database info for {_id_or_endpoint}" - ) - _region = this_db_info["info"]["region"] - + if _id_p is None: + raise ValueError("Either `api_endpoint` or `id` must be supplied.") _token = coerce_token_provider(token) or self.token_provider + _region = normalize_region_for_id( + database_id=_id_p, + token_str=_token.get_token(), + environment=self.environment, + region_param=region, + max_time_ms=max_time_ms, + ) _api_endpoint = build_api_endpoint( environment=self.environment, - database_id=_id_or_endpoint, + database_id=_id_p, region=_region, ) return Database( api_endpoint=_api_endpoint, token=_token, keyspace=keyspace_param, - caller_name=self._caller_name, - caller_version=self._caller_version, + callers=self.callers, environment=self.environment, api_path=api_path, api_version=api_version, @@ -334,13 +342,18 @@ def get_database( else: # in this case, this call is an alias for get_database_by_api_endpoint # - max_time_ms ignored - # - assume `_id_or_endpoint` is actually the endpoint + # - require the endpoint to be passed + if _id_p is not None: + raise ValueError("Cannot use a Database ID outside of Astra DB.") if region is not None: raise ValueError( "Parameter `region` not supported outside of Astra DB." 
) + if _api_endpoint_p is None: + raise ValueError("Parameter `api_endpoint` is required.") + # _api_endpoint_p guaranteed not null at this point return self.get_database_by_api_endpoint( - api_endpoint=_id_or_endpoint, + api_endpoint=_api_endpoint_p, token=token, keyspace=keyspace_param, api_path=api_path, @@ -349,33 +362,85 @@ def get_database( def get_async_database( self, - id: str | None = None, + api_endpoint_or_id: str | None = None, *, api_endpoint: str | None = None, token: str | TokenProvider | None = None, keyspace: str | None = None, namespace: str | None = None, + id: str | None = None, region: str | None = None, api_path: str | None = None, api_version: str | None = None, max_time_ms: int | None = None, ) -> AsyncDatabase: """ - Get an AsyncDatabase object from this client. + Get an AsyncDatabase object from this client, for doing data-related work. - This method has identical behavior and signature as the sync - counterpart `get_database`: please see that one for more details. + Args: + api_endpoint_or_id: positional parameter that can stand for both + `api_endpoint` and `id`. Passing them together is an error. + api_endpoint: the API Endpoint for the target database + (e.g. `https://-.apps.astra.datastax.com`). + The database must exist already for the resulting object + to be effectively used; in other words, this invocation + does not create the database, just the object instance. + Actual admin work can be achieved by using the AstraDBAdmin object. + token: if supplied, is passed to the Database instead of the client token. + This can be either a literal token string or a subclass of + `astrapy.authentication.TokenProvider`. + keyspace: if provided, it is passed to the Database; otherwise + the Database class will apply an environment-specific default. + namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. + id: the target database ID. This is alternative to using the API Endpoint. 
+ region: the region to use for connecting to the database. The
+ database must be located in that region. This parameter can be used
+ only if the database is specified by its ID (instead of API Endpoint).
+ If this parameter is not passed, and cannot be inferred
+ from the API endpoint, an additional DevOps API request is made
+ to determine the default region and use it subsequently.
+ api_path: path to append to the API Endpoint. In typical usage, this
+ should be left to its default of "/api/json".
+ api_version: version specifier to append to the API path. In typical
+ usage, this should be left to its default of "v1".
+ max_time_ms: a timeout, in milliseconds, for the DevOps API
+ HTTP request should it be necessary (see the `region` argument).
+
+ Returns:
+ an AsyncDatabase object with which to work on Data API collections.
+
+ Example:
+ >>> async def create_use_db(cl: DataAPIClient, api_ep: str) -> None:
+ ... async_db = cl.get_async_database(api_ep)
+ ... my_a_coll = await async_db.create_collection("movies", dimension=2)
+ ... await my_a_coll.insert_one({"title": "The Title", "$vector": [0.3, 0.4]})
+ ...
+ >>> asyncio.run(
+ ... create_use_db(
+ ... my_client,
+ ... "https://01234567-...us-west1.apps.astra.datastax.com",
+ ... )
+ ... )
+
+ Note:
+ This method does not perform any admin-level operation through
+ the DevOps API. For actual creation of a database, see the
+ `create_database` method of class AstraDBAdmin.
""" + _api_endpoint_p, _id_p = check_id_endpoint_parg_kwargs( + p_arg=api_endpoint_or_id, api_endpoint=api_endpoint, id=id + ) + check_deprecated_id_region(_id_p, region) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, ) return self.get_database( - id=id, - api_endpoint=api_endpoint, + api_endpoint=_api_endpoint_p, token=token, keyspace=keyspace_param, + id=_id_p, region=region, api_path=api_path, api_version=api_version, @@ -458,8 +523,7 @@ def get_database_by_api_endpoint( api_endpoint=api_endpoint, token=_token, keyspace=keyspace_param, - caller_name=self._caller_name, - caller_version=self._caller_version, + callers=self.callers, environment=self.environment, api_path=api_path, api_version=api_version, @@ -475,8 +539,7 @@ def get_database_by_api_endpoint( api_endpoint=parsed_generic_api_endpoint, token=_token, keyspace=keyspace_param, - caller_name=self._caller_name, - caller_version=self._caller_version, + callers=self.callers, environment=self.environment, api_path=api_path, api_version=api_version, @@ -569,8 +632,7 @@ def get_admin( return AstraDBAdmin( token=coerce_token_provider(token) or self.token_provider, environment=self.environment, - caller_name=self._caller_name, - caller_version=self._caller_version, + callers=self.callers, dev_ops_url=dev_ops_url, dev_ops_api_version=dev_ops_api_version, ) diff --git a/astrapy/collection.py b/astrapy/collection.py index 5dc0cbf4..cd3c13b5 100644 --- a/astrapy/collection.py +++ b/astrapy/collection.py @@ -20,7 +20,7 @@ import warnings from concurrent.futures import ThreadPoolExecutor from types import TracebackType -from typing import TYPE_CHECKING, Any, Iterable +from typing import TYPE_CHECKING, Any, Iterable, Sequence import deprecation @@ -29,6 +29,7 @@ from astrapy.api_options import CollectionAPIOptions from astrapy.authentication import coerce_embedding_headers_provider from astrapy.constants import ( + CallerType, DocumentType, FilterType, ProjectionType, @@ -45,6 +46,7 
@@ DEFAULT_INSERT_MANY_CHUNK_SIZE, DEFAULT_INSERT_MANY_CONCURRENCY, NAMESPACE_DEPRECATION_NOTICE_METHOD, + SET_CALLER_DEPRECATION_NOTICE, ) from astrapy.exceptions import ( BulkWriteException, @@ -60,7 +62,11 @@ base_timeout_info, ) from astrapy.info import CollectionInfo, CollectionOptions -from astrapy.meta import check_deprecated_vector_ize, check_namespace_keyspace +from astrapy.meta import ( + check_caller_parameters, + check_deprecated_vector_ize, + check_namespace_keyspace, +) from astrapy.results import ( BulkWriteResult, DeleteResult, @@ -226,9 +232,14 @@ class Collection: namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. api_options: An instance of `astrapy.api_options.CollectionAPIOptions` providing the general settings for interacting with the Data API. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. Removal 2.0. 
Examples: >>> from astrapy import DataAPIClient, Collection @@ -260,9 +271,11 @@ def __init__( keyspace: str | None = None, namespace: str | None = None, api_options: CollectionAPIOptions | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> None: + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -277,8 +290,7 @@ def __init__( raise ValueError("Attempted to create Collection with 'keyspace' unset.") self._database = database._copy( keyspace=_keyspace, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, ) self._name = name @@ -288,8 +300,7 @@ def __init__( **additional_headers, } - self.caller_name = caller_name - self.caller_version = caller_version + self.callers = callers_param self._api_commander = self._get_api_commander() def __repr__(self) -> str: @@ -342,7 +353,7 @@ def _get_api_commander(self) -> APICommander: api_endpoint=self._database.api_endpoint, path=base_path, headers=self._commander_headers, - callers=[(self.caller_name, self.caller_version)], + callers=self.callers, ) return api_commander @@ -354,9 +365,11 @@ def _copy( keyspace: str | None = None, namespace: str | None = None, api_options: CollectionAPIOptions | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> Collection: + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -366,8 +379,7 @@ def _copy( name=name or self.name, keyspace=keyspace_param or self.keyspace, api_options=self.api_options.with_override(api_options), - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, ) def with_options( @@ -376,6 
+388,7 @@ def with_options( name: str | None = None, embedding_api_key: str | EmbeddingHeadersProvider | None = None, collection_max_time_ms: int | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> Collection: @@ -403,9 +416,15 @@ def with_options( `find`, `delete_many`, `insert_many` and so on), it is strongly suggested to provide a specific timeout as the default one likely wouldn't make much sense. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. Returns: a new Collection instance. @@ -413,10 +432,11 @@ def with_options( Example: >>> my_other_coll = my_coll.with_options( ... name="the_other_coll", - ... caller_name="caller_identity", + ... callers=[("caller_identity", "0.1.2")], ... 
) """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) _api_options = CollectionAPIOptions( embedding_api_key=coerce_embedding_headers_provider(embedding_api_key), max_time_ms=collection_max_time_ms, @@ -425,8 +445,7 @@ def with_options( return self._copy( name=name, api_options=_api_options, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, ) def to_async( @@ -438,6 +457,7 @@ def to_async( namespace: str | None = None, embedding_api_key: str | EmbeddingHeadersProvider | None = None, collection_max_time_ms: int | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> AsyncCollection: @@ -472,9 +492,15 @@ def to_async( `find`, `delete_many`, `insert_many` and so on), it is strongly suggested to provide a specific timeout as the default one likely wouldn't make much sense. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. Returns: the new copy, an AsyncCollection instance. 
@@ -484,6 +510,7 @@ def to_async( 77 """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -498,10 +525,15 @@ def to_async( name=name or self.name, keyspace=keyspace_param or self.keyspace, api_options=self.api_options.with_override(_api_options), - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, ) + @deprecation.deprecated( # type: ignore[misc] + deprecated_in="1.5.1", + removed_in="2.0.0", + current_version=__version__, + details=SET_CALLER_DEPRECATION_NOTICE, + ) def set_caller( self, caller_name: str | None = None, @@ -521,8 +553,8 @@ def set_caller( """ logger.info(f"setting caller to {caller_name}/{caller_version}") - self.caller_name = caller_name or self.caller_name - self.caller_version = caller_version or self.caller_version + callers_param = check_caller_parameters([], caller_name, caller_version) + self.callers = callers_param or self.callers self._api_commander = self._get_api_commander() def options(self, *, max_time_ms: int | None = None) -> CollectionOptions: @@ -2368,17 +2400,10 @@ def delete_one( logger.info(f"finished deleteOne on '{self.name}'") if "deletedCount" in do_response.get("status", {}): deleted_count = do_response["status"]["deletedCount"] - if deleted_count == -1: - return DeleteResult( - deleted_count=None, - raw_results=[do_response], - ) - else: - # expected a non-negative integer: - return DeleteResult( - deleted_count=deleted_count, - raw_results=[do_response], - ) + return DeleteResult( + deleted_count=deleted_count, + raw_results=[do_response], + ) else: raise DataAPIFaultyResponseException( text="Faulty response from delete_one API command.", @@ -2800,9 +2825,14 @@ class AsyncCollection: namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. 
api_options: An instance of `astrapy.api_options.CollectionAPIOptions` providing the general settings for interacting with the Data API. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. Removal 2.0. Examples: >>> from astrapy import DataAPIClient, AsyncCollection @@ -2836,9 +2866,11 @@ def __init__( keyspace: str | None = None, namespace: str | None = None, api_options: CollectionAPIOptions | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> None: + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -2855,8 +2887,7 @@ def __init__( ) self._database = database._copy( keyspace=_keyspace, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, ) self._name = name @@ -2866,8 +2897,7 @@ def __init__( **additional_headers, } - self.caller_name = caller_name - self.caller_version = caller_version + self.callers = callers_param self._api_commander = self._get_api_commander() def __repr__(self) -> str: @@ -2920,7 +2950,7 @@ def _get_api_commander(self) -> APICommander: api_endpoint=self._database.api_endpoint, path=base_path, headers=self._commander_headers, - callers=[(self.caller_name, self.caller_version)], + 
callers=self.callers, ) return api_commander @@ -2948,9 +2978,11 @@ def _copy( keyspace: str | None = None, namespace: str | None = None, api_options: CollectionAPIOptions | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> AsyncCollection: + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -2960,8 +2992,7 @@ def _copy( name=name or self.name, keyspace=keyspace_param or self.keyspace, api_options=self.api_options.with_override(api_options), - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, ) def with_options( @@ -2970,6 +3001,7 @@ def with_options( name: str | None = None, embedding_api_key: str | EmbeddingHeadersProvider | None = None, collection_max_time_ms: int | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> AsyncCollection: @@ -2997,9 +3029,15 @@ def with_options( `find`, `delete_many`, `insert_many` and so on), it is strongly suggested to provide a specific timeout as the default one likely wouldn't make much sense. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. 
+ Removal 2.0. Returns: a new AsyncCollection instance. @@ -3007,10 +3045,11 @@ def with_options( Example: >>> my_other_async_coll = my_async_coll.with_options( ... name="the_other_coll", - ... caller_name="caller_identity", + ... callers=[("caller_identity", "0.1.2")], ... ) """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) _api_options = CollectionAPIOptions( embedding_api_key=coerce_embedding_headers_provider(embedding_api_key), max_time_ms=collection_max_time_ms, @@ -3019,8 +3058,7 @@ def with_options( return self._copy( name=name, api_options=_api_options, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, ) def to_sync( @@ -3032,6 +3070,7 @@ def to_sync( namespace: str | None = None, embedding_api_key: str | EmbeddingHeadersProvider | None = None, collection_max_time_ms: int | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> Collection: @@ -3066,9 +3105,15 @@ def to_sync( `find`, `delete_many`, `insert_many` and so on), it is strongly suggested to provide a specific timeout as the default one likely wouldn't make much sense. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. Returns: the new copy, a Collection instance. 
@@ -3078,6 +3123,7 @@ def to_sync( 77 """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -3092,10 +3138,15 @@ def to_sync( name=name or self.name, keyspace=keyspace_param or self.keyspace, api_options=self.api_options.with_override(_api_options), - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, ) + @deprecation.deprecated( # type: ignore[misc] + deprecated_in="1.5.1", + removed_in="2.0.0", + current_version=__version__, + details=SET_CALLER_DEPRECATION_NOTICE, + ) def set_caller( self, caller_name: str | None = None, @@ -3115,8 +3166,8 @@ def set_caller( """ logger.info(f"setting caller to {caller_name}/{caller_version}") - self.caller_name = caller_name or self.caller_name - self.caller_version = caller_version or self.caller_version + callers_param = check_caller_parameters([], caller_name, caller_version) + self.callers = callers_param or self.callers self._api_commander = self._get_api_commander() async def options(self, *, max_time_ms: int | None = None) -> CollectionOptions: @@ -5088,17 +5139,10 @@ async def delete_one( logger.info(f"finished deleteOne on '{self.name}'") if "deletedCount" in do_response.get("status", {}): deleted_count = do_response["status"]["deletedCount"] - if deleted_count == -1: - return DeleteResult( - deleted_count=None, - raw_results=[do_response], - ) - else: - # expected a non-negative integer: - return DeleteResult( - deleted_count=deleted_count, - raw_results=[do_response], - ) + return DeleteResult( + deleted_count=deleted_count, + raw_results=[do_response], + ) else: raise DataAPIFaultyResponseException( text="Faulty response from delete_one API command.", diff --git a/astrapy/constants.py b/astrapy/constants.py index 84143b8d..00580e03 100644 --- a/astrapy/constants.py +++ b/astrapy/constants.py @@ -14,7 +14,7 @@ from 
__future__ import annotations -from typing import Any, Dict, Iterable, Union +from typing import Any, Dict, Iterable, Tuple, Union from astrapy.defaults import ( DATA_API_ENVIRONMENT_CASSANDRA, @@ -37,6 +37,7 @@ SortType = Dict[str, Any] FilterType = Dict[str, Any] VectorType = Iterable[float] +CallerType = Tuple[Union[str, None], Union[str, None]] def normalize_optional_projection( diff --git a/astrapy/cursors.py b/astrapy/cursors.py index bf336b11..c059654f 100644 --- a/astrapy/cursors.py +++ b/astrapy/cursors.py @@ -18,7 +18,9 @@ import json import logging import time +import warnings from collections.abc import AsyncIterator +from enum import Enum from typing import ( TYPE_CHECKING, Any, @@ -31,6 +33,8 @@ TypeVar, ) +import deprecation + from astrapy.constants import ( DocumentType, ProjectionType, @@ -55,6 +59,15 @@ IndexPairType = Tuple[str, Optional[int]] +class CursorState(Enum): + # Iteration over results has not started yet (alive=T, started=F) + IDLE = "idle" + # Iteration has started, *can* still yield results (alive=T, started=T) + STARTED = "started" + # Finished/forcibly stopped. Won't return more documents (alive=F) + CLOSED = "closed" + + def _maybe_valid_list_index(key_block: str) -> int | None: # '0', '1' is good. '00', '01', '-30' are not. try: @@ -244,9 +257,8 @@ class BaseCursor: _include_similarity: bool | None _include_sort_vector: bool | None _sort: dict[str, Any] | None - _started: bool - _retrieved: int - _alive: bool + _state: CursorState + _consumed: int _iterator: _LookAheadIterator | _AsyncLookAheadIterator | None = None _api_response_status: dict[str, Any] | None @@ -260,34 +272,11 @@ def __init__( ) -> None: raise NotImplementedError - # Note: this, i.e. cursor[i]/cursor[i:j], is disabled - # pending full skip/limit support by the Data API. 
- # - # def __getitem__(self: BC, index: Union[int, slice]) -> Union[BC, DocumentType]: - # self._ensure_not_started() - # self._ensure_alive() - # if isinstance(index, int): - # # In this case, a separate cursor is run, not touching self - # return self._item_at_index(index) - # elif isinstance(index, slice): - # start = index.start - # stop = index.stop - # step = index.step - # if step is not None and step != 1: - # raise ValueError("Cursor slicing cannot have arbitrary step") - # _skip = start - # _limit = stop - start - # return self.limit(_limit).skip(_skip) - # else: - # raise TypeError( - # f"cursor indices must be integers or slices, not {type(index).__name__}" - # ) - def __repr__(self) -> str: return ( f'{self.__class__.__name__}("{self._collection.name}", ' f"{self.state}, " - f"retrieved so far: {self.retrieved})" + f"consumed so far: {self.consumed})" ) def _item_at_index(self, index: int) -> DocumentType: @@ -295,16 +284,16 @@ def _item_at_index(self, index: int) -> DocumentType: raise NotImplementedError def _ensure_alive(self) -> None: - if not self._alive: + if not self.alive: raise CursorIsStartedException( - text="Cursor is closed.", + text="Cursor not alive.", cursor_state=self.state, ) - def _ensure_not_started(self) -> None: - if self._started: + def _ensure_idle(self) -> None: + if self._state != CursorState.IDLE: raise CursorIsStartedException( - text="Cursor is started already.", + text="Cursor started already.", cursor_state=self.state, ) @@ -318,7 +307,6 @@ def _copy( skip: int | None = None, include_similarity: bool | None = None, include_sort_vector: bool | None = None, - started: bool | None = None, sort: dict[str, Any] | None = None, ) -> BC: new_cursor = self.__class__( @@ -341,34 +329,17 @@ def _copy( if include_sort_vector is not None else self._include_sort_vector ) - new_cursor._started = started if started is not None else self._started new_cursor._sort = sort if sort is not None else self._sort - if started is False: - 
new_cursor._retrieved = 0 - new_cursor._alive = True - else: - new_cursor._retrieved = self._retrieved - new_cursor._alive = self._alive return new_cursor @property def state(self) -> str: """ - The current state of this cursor, which can be: - - "new": if iteration over results has not started yet - - "running": iteration has started, can still yield results - - "exhausted": the cursor has finished and won't return documents + The current state of this cursor, which can be one of + the astrapy.cursors.CursorState enum. """ - state_desc: str - if self._started: - if self._alive: - state_desc = "running" - else: - state_desc = "exhausted" - else: - state_desc = "new" - return state_desc + return self._state.value @property def address(self) -> str: @@ -385,7 +356,7 @@ def alive(self) -> bool: Whether the cursor has the potential to yield more data. """ - return self._alive + return self._state != CursorState.CLOSED def clone(self: BC) -> BC: """ @@ -396,14 +367,14 @@ def clone(self: BC) -> BC: i.e. fully un-consumed. """ - return self._copy(started=False) + return self._copy() def close(self) -> None: """ Stop/kill the cursor, regardless of its status. """ - self._alive = False + self._state = CursorState.CLOSED @property def cursor_id(self) -> int: @@ -424,7 +395,7 @@ def limit(self: BC, limit: int | None) -> BC: this cursor itself. """ - self._ensure_not_started() + self._ensure_idle() self._ensure_alive() self._limit = limit if limit != 0 else None return self @@ -440,7 +411,7 @@ def include_similarity(self: BC, include_similarity: bool | None) -> BC: this cursor itself. """ - self._ensure_not_started() + self._ensure_idle() self._ensure_alive() self._include_similarity = include_similarity return self @@ -456,7 +427,7 @@ def include_sort_vector(self: BC, include_sort_vector: bool | None) -> BC: this cursor itself. 
""" - self._ensure_not_started() + self._ensure_idle() self._ensure_alive() self._include_sort_vector = include_sort_vector return self @@ -464,10 +435,26 @@ def include_sort_vector(self: BC, include_sort_vector: bool | None) -> BC: @property def retrieved(self) -> int: """ - The number of documents retrieved so far by the code consuming the cursor. + The number of documents consumed so far (by the code consuming the cursor). """ - return self._retrieved + the_warning = deprecation.DeprecatedWarning( + "the 'retrieved' property of Cursor objects", + deprecated_in="1.5.1", + removed_in="2.0.0", + details="Please use the 'consumed' property", + ) + warnings.warn(the_warning, stacklevel=2) + + return self.consumed + + @property + def consumed(self) -> int: + """ + The number of documents consumed so far (by the code consuming the cursor). + """ + + return self._consumed def rewind(self: BC) -> BC: """ @@ -477,9 +464,8 @@ def rewind(self: BC) -> BC: this cursor itself. """ - self._started = False - self._retrieved = 0 - self._alive = True + self._state = CursorState.IDLE + self._consumed = 0 self._iterator = None return self @@ -498,7 +484,7 @@ def skip(self: BC, skip: int | None) -> BC: `sort` criterion of the ascending/descending type (i.e. it cannot be used when not sorting, nor with vector-based ANN search). """ - self._ensure_not_started() + self._ensure_idle() self._ensure_alive() self._skip = skip return self @@ -530,7 +516,7 @@ def sort( a command such as `.distinct()` on a cursor. 
""" - self._ensure_not_started() + self._ensure_idle() self._ensure_alive() self._sort = sort return self @@ -595,9 +581,8 @@ def __init__( self._include_similarity: bool | None = None self._include_sort_vector: bool | None = None self._sort: dict[str, Any] | None = None - self._started = False - self._retrieved = 0 - self._alive = True + self._state = CursorState.IDLE + self._consumed = 0 # self._iterator: _LookAheadIterator | None = None self._api_response_status: dict[str, Any] | None = None @@ -606,7 +591,7 @@ def __iter__(self) -> Cursor: self._ensure_alive() if self._iterator is None: self._iterator = self._create_iterator() - self._started = True + self._state = CursorState.STARTED return self def __next__(self) -> DocumentType: @@ -615,7 +600,7 @@ def __next__(self) -> DocumentType: raise StopIteration if self._iterator is None: self._iterator = self._create_iterator() - self._started = True + self._state = CursorState.STARTED # check for overall timing out if self._overall_max_time_ms is not None: _elapsed = time.time() - self._started_time_s # type: ignore[operator] @@ -628,10 +613,10 @@ def __next__(self) -> DocumentType: ) try: next_item = self._iterator.__next__() - self._retrieved = self._retrieved + 1 + self._consumed = self._consumed + 1 return next_item except StopIteration: - self._alive = False + self.close() raise def get_sort_vector(self) -> list[float] | None: @@ -646,7 +631,7 @@ def get_sort_vector(self) -> list[float] | None: if self._iterator is None: self._iterator = self._create_iterator() - self._started = True + self._state = CursorState.STARTED self._iterator.preread() if self._api_response_status: return self._api_response_status.get("sortVector") @@ -662,7 +647,7 @@ def _item_at_index(self, index: int) -> DocumentType: raise IndexError("no such item for Cursor instance") def _create_iterator(self) -> _LookAheadIterator: - self._ensure_not_started() + self._ensure_idle() self._ensure_alive() _options_0 = { k: v @@ -756,6 +741,22 @@ 
def collection(self) -> Collection: The (synchronous) collection this cursor is targeting. """ + the_warning = deprecation.DeprecatedWarning( + "the 'collection' property of Cursor objects", + deprecated_in="1.5.1", + removed_in="2.0.0", + details="Please use the 'data_source' property", + ) + warnings.warn(the_warning, stacklevel=2) + + return self.data_source + + @property + def data_source(self) -> Collection: + """ + The (synchronous) collection this cursor is targeting. + """ + return self._collection def distinct(self, key: str, max_time_ms: int | None = None) -> list[Any]: @@ -800,7 +801,6 @@ def distinct(self, key: str, max_time_ms: int | None = None) -> list[Any]: d_cursor = self._copy( projection={_key: True}, - started=False, overall_max_time_ms=max_time_ms, ) logger.info(f"running distinct() on '{self._collection.name}'") @@ -874,9 +874,8 @@ def __init__( self._include_similarity: bool | None = None self._include_sort_vector: bool | None = None self._sort: dict[str, Any] | None = None - self._started = False - self._retrieved = 0 - self._alive = True + self._state = CursorState.IDLE + self._consumed = 0 # self._iterator: _AsyncLookAheadIterator | None = None self._api_response_status: dict[str, Any] | None = None @@ -885,7 +884,7 @@ def __aiter__(self) -> AsyncCursor: self._ensure_alive() if self._iterator is None: self._iterator = self._create_iterator() - self._started = True + self._state = CursorState.STARTED return self async def __anext__(self) -> DocumentType: @@ -894,7 +893,7 @@ async def __anext__(self) -> DocumentType: raise StopAsyncIteration if self._iterator is None: self._iterator = self._create_iterator() - self._started = True + self._state = CursorState.STARTED # check for overall timing out if self._overall_max_time_ms is not None: _elapsed = time.time() - self._started_time_s # type: ignore[operator] @@ -907,10 +906,10 @@ async def __anext__(self) -> DocumentType: ) try: next_item = await self._iterator.__anext__() - self._retrieved = 
self._retrieved + 1 + self._consumed = self._consumed + 1 return next_item except StopAsyncIteration: - self._alive = False + self.close() raise async def get_sort_vector(self) -> list[float] | None: @@ -925,7 +924,7 @@ async def get_sort_vector(self) -> list[float] | None: if self._iterator is None: self._iterator = self._create_iterator() - self._started = True + self._state = CursorState.STARTED await self._iterator.preread() if self._api_response_status: return self._api_response_status.get("sortVector") @@ -941,7 +940,7 @@ def _item_at_index(self, index: int) -> DocumentType: raise IndexError("no such item for AsyncCursor instance") def _create_iterator(self) -> _AsyncLookAheadIterator: - self._ensure_not_started() + self._ensure_idle() self._ensure_alive() _options_0 = { k: v @@ -1034,7 +1033,6 @@ def _to_sync( skip: int | None = None, include_similarity: bool | None = None, include_sort_vector: bool | None = None, - started: bool | None = None, sort: dict[str, Any] | None = None, ) -> Cursor: new_cursor = Cursor( @@ -1057,14 +1055,7 @@ def _to_sync( if include_sort_vector is not None else self._include_sort_vector ) - new_cursor._started = started if started is not None else self._started new_cursor._sort = sort if sort is not None else self._sort - if started is False: - new_cursor._retrieved = 0 - new_cursor._alive = True - else: - new_cursor._retrieved = self._retrieved - new_cursor._alive = self._alive return new_cursor @property @@ -1073,6 +1064,22 @@ def collection(self) -> AsyncCollection: The (asynchronous) collection this cursor is targeting. """ + the_warning = deprecation.DeprecatedWarning( + "the 'collection' property of Cursor objects", + deprecated_in="1.5.1", + removed_in="2.0.0", + details="Please use the 'data_source' property", + ) + warnings.warn(the_warning, stacklevel=2) + + return self.data_source + + @property + def data_source(self) -> AsyncCollection: + """ + The (asynchronous) collection this cursor is targeting. 
+ """ + return self._collection async def distinct(self, key: str, max_time_ms: int | None = None) -> list[Any]: @@ -1111,7 +1118,6 @@ async def distinct(self, key: str, max_time_ms: int | None = None) -> list[Any]: d_cursor = self._copy( projection={_key: True}, - started=False, overall_max_time_ms=max_time_ms, ) logger.info(f"running distinct() on '{self._collection.name}'") diff --git a/astrapy/database.py b/astrapy/database.py index 0f5705af..ef4a8239 100644 --- a/astrapy/database.py +++ b/astrapy/database.py @@ -17,7 +17,7 @@ import logging import warnings from types import TracebackType -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Sequence import deprecation @@ -30,7 +30,7 @@ coerce_token_provider, redact_secret, ) -from astrapy.constants import Environment +from astrapy.constants import CallerType, Environment from astrapy.cursors import AsyncCommandCursor, CommandCursor from astrapy.defaults import ( API_PATH_ENV_MAP, @@ -38,6 +38,7 @@ DEFAULT_ASTRA_DB_KEYSPACE, DEFAULT_DATA_API_AUTH_HEADER, NAMESPACE_DEPRECATION_NOTICE_METHOD, + SET_CALLER_DEPRECATION_NOTICE, ) from astrapy.exceptions import ( CollectionAlreadyExistsException, @@ -51,7 +52,7 @@ CollectionVectorServiceOptions, DatabaseInfo, ) -from astrapy.meta import check_namespace_keyspace +from astrapy.meta import check_caller_parameters, check_namespace_keyspace if TYPE_CHECKING: from astrapy.admin import DatabaseAdmin @@ -145,9 +146,14 @@ class Database: most operations are unavailable until a keyspace is set (through an explicit `use_keyspace` invocation or equivalent). namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. 
These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. Removal 2.0. environment: a string representing the target Data API environment. It can be left unspecified for the default value of `Environment.PROD`; other values include `Environment.OTHER`, `Environment.DSE`. @@ -176,12 +182,14 @@ def __init__( *, keyspace: str | None = None, namespace: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, environment: str | None = None, api_path: str | None = None, api_version: str | None = None, ) -> None: + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -214,8 +222,7 @@ def __init__( DEFAULT_DATA_API_AUTH_HEADER: self.token_provider.get_token(), } - self.caller_name = caller_name - self.caller_version = caller_version + self.callers = callers_param self._api_commander = self._get_api_commander(keyspace=self.keyspace) self._name: str | None = None @@ -249,8 +256,7 @@ def __eq__(self, other: Any) -> bool: self.api_path == other.api_path, self.api_version == other.api_version, self.keyspace == other.keyspace, - self.caller_name == other.caller_name, - self.caller_version == other.caller_version, + self.callers == other.callers, self.api_commander == other.api_commander, ] ) @@ -282,7 +288,7 @@ def _get_api_commander(self, keyspace: str | None) -> APICommander | None: api_endpoint=self.api_endpoint, path=base_path, headers=self._commander_headers, - callers=[(self.caller_name, self.caller_version)], + callers=self.callers, ) return api_commander @@ -310,12 +316,14 @@ def _copy( 
token: str | TokenProvider | None = None, keyspace: str | None = None, namespace: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, environment: str | None = None, api_path: str | None = None, api_version: str | None = None, ) -> Database: + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -324,8 +332,7 @@ def _copy( api_endpoint=api_endpoint or self.api_endpoint, token=coerce_token_provider(token) or self.token_provider, keyspace=keyspace_param or self.keyspace, - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, environment=environment or self.environment, api_path=api_path or self.api_path, api_version=api_version or self.api_version, @@ -336,6 +343,7 @@ def with_options( *, keyspace: str | None = None, namespace: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> Database: @@ -347,9 +355,15 @@ def with_options( one is explicitly specified in the call. If no keyspace is supplied when creating a Database, the name "default_keyspace" is set. namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. 
This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. Returns: a new `Database` instance. @@ -357,19 +371,18 @@ def with_options( Example: >>> my_db_2 = my_db.with_options( ... keyspace="the_other_keyspace", - ... caller_name="the_caller", - ... caller_version="0.1.0", + ... callers=[("the_caller", "0.1.0")], ... ) """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, ) return self._copy( keyspace=keyspace_param, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, ) def to_async( @@ -379,6 +392,7 @@ def to_async( token: str | TokenProvider | None = None, keyspace: str | None = None, namespace: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, environment: str | None = None, @@ -400,9 +414,15 @@ def to_async( one is explicitly specified in the call. If no keyspace is supplied when creating a Database, the name "default_keyspace" is set. namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. environment: a string representing the target Data API environment. 
Values are, for example, `Environment.PROD`, `Environment.OTHER`, or `Environment.DSE`. @@ -419,6 +439,7 @@ def to_async( >>> asyncio.run(my_async_db.list_collection_names()) """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -427,13 +448,18 @@ def to_async( api_endpoint=api_endpoint or self.api_endpoint, token=coerce_token_provider(token) or self.token_provider, keyspace=keyspace_param or self.keyspace, - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, environment=environment or self.environment, api_path=api_path or self.api_path, api_version=api_version or self.api_version, ) + @deprecation.deprecated( # type: ignore[misc] + deprecated_in="1.5.1", + removed_in="2.0.0", + current_version=__version__, + details=SET_CALLER_DEPRECATION_NOTICE, + ) def set_caller( self, caller_name: str | None = None, @@ -453,8 +479,8 @@ def set_caller( """ logger.info(f"setting caller to {caller_name}/{caller_version}") - self.caller_name = caller_name - self.caller_version = caller_version + callers_param = check_caller_parameters([], caller_name, caller_version) + self.callers = callers_param self._api_commander = self._get_api_commander(keyspace=self.keyspace) @deprecation.deprecated( # type: ignore[misc] @@ -1127,8 +1153,7 @@ def get_database_admin( api_endpoint=self.api_endpoint, token=coerce_token_provider(token) or self.token_provider, environment=self.environment, - caller_name=self.caller_name, - caller_version=self.caller_version, + callers=self.callers, dev_ops_url=dev_ops_url, dev_ops_api_version=dev_ops_api_version, spawner_database=self, @@ -1148,8 +1173,7 @@ def get_database_admin( environment=self.environment, api_path=self.api_path, api_version=self.api_version, - caller_name=self.caller_name, - caller_version=self.caller_version, + callers=self.callers, 
spawner_database=self, ) @@ -1180,9 +1204,14 @@ class AsyncDatabase: most operations are unavailable until a keyspace is set (through an explicit `use_keyspace` invocation or equivalent). namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. Removal 2.0. environment: a string representing the target Data API environment. It can be left unspecified for the default value of `Environment.PROD`; other values include `Environment.OTHER`, `Environment.DSE`. 
@@ -1211,12 +1240,14 @@ def __init__( *, keyspace: str | None = None, namespace: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, environment: str | None = None, api_path: str | None = None, api_version: str | None = None, ) -> None: + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -1249,8 +1280,7 @@ def __init__( DEFAULT_DATA_API_AUTH_HEADER: self.token_provider.get_token(), } - self.caller_name = caller_name - self.caller_version = caller_version + self.callers = callers_param self._api_commander = self._get_api_commander(keyspace=self.keyspace) self._name: str | None = None @@ -1284,8 +1314,7 @@ def __eq__(self, other: Any) -> bool: self.api_path == other.api_path, self.api_version == other.api_version, self.keyspace == other.keyspace, - self.caller_name == other.caller_name, - self.caller_version == other.caller_version, + self.callers == other.callers, self.api_commander == other.api_commander, ] ) @@ -1317,7 +1346,7 @@ def _get_api_commander(self, keyspace: str | None) -> APICommander | None: api_endpoint=self.api_endpoint, path=base_path, headers=self._commander_headers, - callers=[(self.caller_name, self.caller_version)], + callers=self.callers, ) return api_commander @@ -1361,12 +1390,14 @@ def _copy( token: str | TokenProvider | None = None, keyspace: str | None = None, namespace: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, environment: str | None = None, api_path: str | None = None, api_version: str | None = None, ) -> AsyncDatabase: + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -1375,8 +1406,7 @@ def _copy( api_endpoint=api_endpoint or self.api_endpoint, 
token=coerce_token_provider(token) or self.token_provider, keyspace=keyspace_param or self.keyspace, - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, environment=environment or self.environment, api_path=api_path or self.api_path, api_version=api_version or self.api_version, @@ -1387,6 +1417,7 @@ def with_options( *, keyspace: str | None = None, namespace: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, ) -> AsyncDatabase: @@ -1398,9 +1429,15 @@ def with_options( one is explicitly specified in the call. If no keyspace is supplied when creating a Database, the name "default_keyspace" is set. namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. Returns: a new `AsyncDatabase` instance. @@ -1408,11 +1445,11 @@ def with_options( Example: >>> my_async_db_2 = my_async_db.with_options( ... keyspace="the_other_keyspace", - ... caller_name="the_caller", - ... caller_version="0.1.0", + ... callers=[("the_caller", "0.1.0")], ... 
) """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -1420,8 +1457,7 @@ def with_options( return self._copy( keyspace=keyspace_param, - caller_name=caller_name, - caller_version=caller_version, + callers=callers_param, ) def to_sync( @@ -1431,6 +1467,7 @@ def to_sync( token: str | TokenProvider | None = None, keyspace: str | None = None, namespace: str | None = None, + callers: Sequence[CallerType] = [], caller_name: str | None = None, caller_version: str | None = None, environment: str | None = None, @@ -1452,9 +1489,15 @@ def to_sync( one is explicitly specified in the call. If no keyspace is supplied when creating a Database, the name "default_keyspace" is set. namespace: an alias for `keyspace`. *DEPRECATED*, removal in 2.0. - caller_name: name of the application, or framework, on behalf of which - the Data API calls are performed. This ends up in the request user-agent. - caller_version: version of the caller. + callers: a list of caller identities, i.e. applications, or frameworks, + on behalf of which the Data API calls are performed. These end up + in the request user-agent. + Each caller identity is a ("caller_name", "caller_version") pair. + caller_name: *DEPRECATED*, use `callers`. Removal 2.0. Name of the + application, or framework, on behalf of which the Data API calls + are performed. This ends up in the request user-agent. + caller_version: version of the caller. *DEPRECATED*, use `callers`. + Removal 2.0. environment: a string representing the target Data API environment. Values are, for example, `Environment.PROD`, `Environment.OTHER`, or `Environment.DSE`. 
@@ -1472,6 +1515,7 @@ def to_sync( ['a_collection', 'another_collection'] """ + callers_param = check_caller_parameters(callers, caller_name, caller_version) keyspace_param = check_namespace_keyspace( keyspace=keyspace, namespace=namespace, @@ -1480,13 +1524,18 @@ def to_sync( api_endpoint=api_endpoint or self.api_endpoint, token=coerce_token_provider(token) or self.token_provider, keyspace=keyspace_param or self.keyspace, - caller_name=caller_name or self.caller_name, - caller_version=caller_version or self.caller_version, + callers=callers_param or self.callers, environment=environment or self.environment, api_path=api_path or self.api_path, api_version=api_version or self.api_version, ) + @deprecation.deprecated( # type: ignore[misc] + deprecated_in="1.5.1", + removed_in="2.0.0", + current_version=__version__, + details=SET_CALLER_DEPRECATION_NOTICE, + ) def set_caller( self, caller_name: str | None = None, @@ -1506,8 +1555,8 @@ def set_caller( """ logger.info(f"setting caller to {caller_name}/{caller_version}") - self.caller_name = caller_name - self.caller_version = caller_version + callers_param = check_caller_parameters([], caller_name, caller_version) + self.callers = callers_param self._api_commander = self._get_api_commander(keyspace=self.keyspace) @deprecation.deprecated( # type: ignore[misc] @@ -2192,8 +2241,7 @@ def get_database_admin( api_endpoint=self.api_endpoint, token=coerce_token_provider(token) or self.token_provider, environment=self.environment, - caller_name=self.caller_name, - caller_version=self.caller_version, + callers=self.callers, dev_ops_url=dev_ops_url, dev_ops_api_version=dev_ops_api_version, spawner_database=self, @@ -2213,7 +2261,6 @@ def get_database_admin( environment=self.environment, api_path=self.api_path, api_version=self.api_version, - caller_name=self.caller_name, - caller_version=self.caller_version, + callers=self.callers, spawner_database=self, ) diff --git a/astrapy/defaults.py b/astrapy/defaults.py index 
5c44022a..3385f7fa 100644 --- a/astrapy/defaults.py +++ b/astrapy/defaults.py @@ -84,6 +84,7 @@ } DEV_OPS_RESPONSE_HTTP_ACCEPTED = 202 DEV_OPS_RESPONSE_HTTP_CREATED = 201 +DEV_OPS_DEFAULT_DATABASES_PAGE_SIZE = 25 # Settings for redacting secrets in string representations and logging SECRETS_REDACT_ENDING = "..." @@ -119,3 +120,13 @@ "See https://docs.datastax.com/en/astra-db-serverless/api-reference/client-" "versions.html#version-1-5 for more information." ) + +# Other deprecation notices +SET_CALLER_DEPRECATION_NOTICE = ( + "Please provide the caller(s) at constructor time through the " + "`callers` list parameter." +) +CALLER_NAME_VERSION_DEPRECATION_NOTICE_DETAILS = ( + "Please pass any caller identities as the `callers` constructor parameter: " + "`callers=[('cname1', 'cversion1'), ('cname2', 'cversion2'), ...]`." +) diff --git a/astrapy/meta.py b/astrapy/meta.py index 6d704997..aadfc590 100644 --- a/astrapy/meta.py +++ b/astrapy/meta.py @@ -15,11 +15,13 @@ from __future__ import annotations import warnings -from typing import Any +from typing import Any, Sequence from deprecation import DeprecatedWarning +from astrapy.constants import CallerType from astrapy.defaults import ( + CALLER_NAME_VERSION_DEPRECATION_NOTICE_DETAILS, NAMESPACE_DEPRECATION_NOTICE_NS_DETAILS, NAMESPACE_DEPRECATION_NOTICE_NS_SUBJECT, NAMESPACE_DEPRECATION_NOTICE_UPDATEDBNS_DETAILS, @@ -69,6 +71,61 @@ def check_deprecated_vector_ize( ) +def check_caller_parameters( + callers: Sequence[CallerType], + caller_name: str | None, + caller_version: str | None, +) -> Sequence[CallerType]: + if caller_name is None and caller_version is None: + return callers + else: + # issue a deprecation warning + the_warning = DeprecatedWarning( + "Use of parameters `caller_name` and `caller_version`", + deprecated_in="1.5.1", + removed_in="2.0.0", + details=CALLER_NAME_VERSION_DEPRECATION_NOTICE_DETAILS, + ) + warnings.warn( + the_warning, + stacklevel=3, + ) + if not callers: + return [(caller_name, 
caller_version)] + else: + msg = ( + "None of the deprecated `caller_name` and `caller_version` " + "parameters can be passed if the newer `callers` parameter is " + "also provided." + ) + raise ValueError(msg) + + +def check_deprecated_id_region( + id: str | None, + region: str | None, +) -> None: + # issue a deprecation warning if a database "id" is passed, + # possibly accompanied by a "region". + + if id is not None: + if region is None: + deprecation_subject = "Passing an `id` parameter" + else: + deprecation_subject = "Passing an `id` parameter with a `region`" + # issue a deprecation warning + the_warning = DeprecatedWarning( + deprecation_subject, + deprecated_in="1.5.1", + removed_in="2.0.0", + details="Please switch to using the API Endpoint.", + ) + warnings.warn( + the_warning, + stacklevel=3, + ) + + def check_namespace_keyspace( keyspace: str | None, namespace: str | None, diff --git a/astrapy/operations.py b/astrapy/operations.py index cb9de825..cfd6857b 100644 --- a/astrapy/operations.py +++ b/astrapy/operations.py @@ -46,10 +46,7 @@ def reduce_bulk_write_results(results: list[BulkWriteResult]) -> BulkWriteResult def _sum_results(r1: BulkWriteResult, r2: BulkWriteResult) -> BulkWriteResult: bulk_api_results = {**r1.bulk_api_results, **r2.bulk_api_results} - if r1.deleted_count is None or r2.deleted_count is None: - deleted_count = None - else: - deleted_count = r1.deleted_count + r2.deleted_count + deleted_count = r1.deleted_count + r2.deleted_count inserted_count = r1.inserted_count + r2.inserted_count matched_count = r1.matched_count + r2.matched_count modified_count = r1.modified_count + r2.modified_count diff --git a/astrapy/results.py b/astrapy/results.py index e2c5715c..e49e8272 100644 --- a/astrapy/results.py +++ b/astrapy/results.py @@ -51,7 +51,7 @@ class DeleteResult(OperationResult): list of raw responses can contain exactly one or a number of items. 
""" - deleted_count: int | None + deleted_count: int def __repr__(self) -> str: return self._piecewise_repr( @@ -202,7 +202,7 @@ class BulkWriteResult: """ bulk_api_results: dict[int, list[dict[str, Any]]] - deleted_count: int | None + deleted_count: int inserted_count: int matched_count: int modified_count: int diff --git a/astrapy/user_agents.py b/astrapy/user_agents.py index 9c9572a5..28f36e0a 100644 --- a/astrapy/user_agents.py +++ b/astrapy/user_agents.py @@ -14,28 +14,17 @@ from __future__ import annotations -from importlib import metadata -from importlib.metadata import PackageNotFoundError +from typing import Sequence from astrapy import __version__ +from astrapy.constants import CallerType -def detect_astrapy_user_agent() -> tuple[str | None, str | None]: +def detect_astrapy_user_agent() -> CallerType: package_name = __name__.split(".")[0] return (package_name, __version__) -def detect_ragstack_user_agent() -> tuple[str | None, str | None]: - try: - ragstack_meta = metadata.metadata("ragstack-ai") - if ragstack_meta: - ragstack_version = ragstack_meta["version"] - return ("ragstack", ragstack_version) - except PackageNotFoundError: - pass - return (None, None) - - def compose_user_agent_string( caller_name: str | None, caller_version: str | None ) -> str | None: @@ -48,7 +37,7 @@ def compose_user_agent_string( return None -def compose_full_user_agent(callers: list[tuple[str | None, str | None]]) -> str | None: +def compose_full_user_agent(callers: Sequence[CallerType]) -> str | None: user_agent_strings = [ ua_string for ua_string in ( diff --git a/poetry.lock b/poetry.lock index b290385c..0415a434 100644 --- a/poetry.lock +++ b/poetry.lock @@ -22,69 +22,6 @@ doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] 
-[[package]] -name = "cassandra-driver" -version = "3.29.2" -description = "DataStax Driver for Apache Cassandra" -optional = false -python-versions = "*" -files = [ - {file = "cassandra-driver-3.29.2.tar.gz", hash = "sha256:c4310a7d0457f51a63fb019d8ef501588c491141362b53097fbc62fa06559b7c"}, - {file = "cassandra_driver-3.29.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:957208093ff2353230d0d83edf8c8e8582e4f2999d9a33292be6558fec943562"}, - {file = "cassandra_driver-3.29.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d70353b6d9d6e01e2b261efccfe90ce0aa6f416588e6e626ca2ed0aff6b540cf"}, - {file = "cassandra_driver-3.29.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06ad489e4df2cc7f41d3aca8bd8ddeb8071c4fb98240ed07f1dcd9b5180fd879"}, - {file = "cassandra_driver-3.29.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7f1dfa33c3d93350057d6dc163bb92748b6e6a164c408c75cf2c59be0a203b7"}, - {file = "cassandra_driver-3.29.2-cp310-cp310-win32.whl", hash = "sha256:f9df1e6ae4201eb2eae899cb0649d46b3eb0843f075199b51360bc9d59679a31"}, - {file = "cassandra_driver-3.29.2-cp310-cp310-win_amd64.whl", hash = "sha256:c4a005bc0b4fd8b5716ad931e1cc788dbd45967b0bcbdc3dfde33c7f9fde40d4"}, - {file = "cassandra_driver-3.29.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e31cee01a6fc8cf7f32e443fa0031bdc75eed46126831b7a807ab167b4dc1316"}, - {file = "cassandra_driver-3.29.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:52edc6d4bd7d07b10dc08b7f044dbc2ebe24ad7009c23a65e0916faed1a34065"}, - {file = "cassandra_driver-3.29.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb3a9f24fc84324d426a69dc35df66de550833072a4d9a4d63d72fda8fcaecb9"}, - {file = "cassandra_driver-3.29.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e89de04809d02bb1d5d03c0946a7baaaf85e93d7e6414885b4ea2616efe9de0"}, - {file = "cassandra_driver-3.29.2-cp311-cp311-win32.whl", hash = 
"sha256:7104e5043e9cc98136d7fafe2418cbc448dacb4e1866fe38ff5be76f227437ef"}, - {file = "cassandra_driver-3.29.2-cp311-cp311-win_amd64.whl", hash = "sha256:69aa53f1bdb23487765faa92eef57366637878eafc412f46af999e722353b22f"}, - {file = "cassandra_driver-3.29.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a1e994a82b2e6ab022c5aec24e03ad49fca5f3d47e566a145de34eb0e768473a"}, - {file = "cassandra_driver-3.29.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2039201ae5d9b7c7ce0930af7138d2637ca16a4c7aaae2fbdd4355fbaf3003c5"}, - {file = "cassandra_driver-3.29.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8067fad22e76e250c3846507d804f90b53e943bba442fa1b26583bcac692aaf1"}, - {file = "cassandra_driver-3.29.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee0ebe8eb4fb007d8001ffcd1c3828b74defeb01075d8a1f1116ae9c60f75541"}, - {file = "cassandra_driver-3.29.2-cp312-cp312-win32.whl", hash = "sha256:83dc9399cdabe482fd3095ca54ec227212d8c491b563a7276f6c100e30ee856c"}, - {file = "cassandra_driver-3.29.2-cp312-cp312-win_amd64.whl", hash = "sha256:6c74610f56a4c53863a5d44a2af9c6c3405da19d51966fabd85d7f927d5c6abc"}, - {file = "cassandra_driver-3.29.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c86b0a796ff67d66de7df5f85243832a4dc853217f6a3eade84694f6f4fae151"}, - {file = "cassandra_driver-3.29.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c53700b0d1f8c1d777eaa9e9fb6d17839d9a83f27a61649e0cbaa15d9d3df34b"}, - {file = "cassandra_driver-3.29.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d348c769aa6c37919e7d6247e8cf09c23d387b7834a340408bd7d611f174d80"}, - {file = "cassandra_driver-3.29.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8c496318e3c136cf12ab21e1598fee4b48ea1c71746ea8cc9d32e4dcd09cb93"}, - {file = "cassandra_driver-3.29.2-cp38-cp38-win32.whl", hash = "sha256:d180183451bec81c15e0441fa37a63dc52c6489e860e832cadd854373b423141"}, - {file = 
"cassandra_driver-3.29.2-cp38-cp38-win_amd64.whl", hash = "sha256:a66b20c421d8fb21f18bd0ac713de6f09c5c25b6ab3d6043c3779b9c012d7c98"}, - {file = "cassandra_driver-3.29.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70d4d0dce373943308ad461a425fc70a23d0f524859367b8c6fc292400f39954"}, - {file = "cassandra_driver-3.29.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b86427fab4d5a96e91ad82bb9338d4101ae4d3758ba96c356e0198da3de4d350"}, - {file = "cassandra_driver-3.29.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c25b42e1a99f377a933d79ae93ea27601e337a5abb7bb843a0e951cf1b3836f7"}, - {file = "cassandra_driver-3.29.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36437288d6cd6f6c74b8ee5997692126e24adc2da3d031dc11c7dfea8bc220"}, - {file = "cassandra_driver-3.29.2-cp39-cp39-win32.whl", hash = "sha256:e967c1341a651f03bdc466f3835d72d3c0a0648b562035e6d780fa0b796c02f6"}, - {file = "cassandra_driver-3.29.2-cp39-cp39-win_amd64.whl", hash = "sha256:c5a9aab2367e8aad48ae853847a5a8985749ac5f102676de2c119b33fef13b42"}, -] - -[package.dependencies] -geomet = ">=0.1,<0.3" - -[package.extras] -cle = ["cryptography (>=35.0)"] -graph = ["gremlinpython (==3.4.6)"] - -[[package]] -name = "cassio" -version = "0.1.9" -description = "A framework-agnostic Python library to seamlessly integrate Apache Cassandra(R) with ML/LLM/genAI workloads." 
-optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "cassio-0.1.9-py3-none-any.whl", hash = "sha256:0139d44d5bbd475df77806366c845465f6b08181c0e98ad9acec9f4047d6ab53"}, - {file = "cassio-0.1.9.tar.gz", hash = "sha256:5c3e5d15769396a98f0f260aead6a2c6e707ab1a13fe94f24341d5ef6bdddd6a"}, -] - -[package.dependencies] -cassandra-driver = ">=3.28.0,<4.0.0" -numpy = ">=1.0" -requests = ">=2.31.0,<3.0.0" - [[package]] name = "certifi" version = "2024.8.30" @@ -206,20 +143,6 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - [[package]] name = "colorama" version = "0.4.6" @@ -430,21 +353,6 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2. 
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] -[[package]] -name = "geomet" -version = "0.2.1.post1" -description = "GeoJSON <-> WKT/WKB conversion utilities" -optional = false -python-versions = ">2.6, !=3.3.*, <4" -files = [ - {file = "geomet-0.2.1.post1-py3-none-any.whl", hash = "sha256:a41a1e336b381416d6cbed7f1745c848e91defaa4d4c1bdc1312732e46ffad2b"}, - {file = "geomet-0.2.1.post1.tar.gz", hash = "sha256:91d754f7c298cbfcabd3befdb69c641c27fe75e808b27aa55028605761d17e95"}, -] - -[package.dependencies] -click = "*" -six = "*" - [[package]] name = "h11" version = "0.14.0" @@ -484,13 +392,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.6" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, ] [package.dependencies] @@ -501,7 +409,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" @@ -717,43 +625,6 @@ files = [ {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[[package]] -name = "numpy" -version = "1.24.4" -description = "Fundamental package for array computing in 
Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, - {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, - {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, - {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, - {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, - {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, - {file = 
"numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, - {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, - {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, - {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, - {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, - {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, -] - [[package]] name = "packaging" version = "24.1" @@ -816,70 +687,70 @@ virtualenv = ">=20.10.0" [[package]] name = "pymongo" -version = "4.10.0" +version = "4.10.1" description = "Python driver for MongoDB " optional = false python-versions = ">=3.8" files = [ - {file = "pymongo-4.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0197e0ae594d7b04bf1a012d9c6bbde6f0f5ccf1d88349fabeacd0b40b4334f5"}, - {file = "pymongo-4.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81b1454f361f27066552a931206a1562f72b34198eb2154c5c430d7b00b1c5f8"}, - {file = "pymongo-4.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:debfdbbed338fbdcf16774a8f7f9ab7dca80314946d31cc6ce9a23a87b0b4903"}, - {file = "pymongo-4.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0087eb80d7d253479b974e5b531a75433942bbdd3ab28c7c4677176815288c52"}, - {file = "pymongo-4.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b602e714575eba4cb521ad56eb5af55f0e44d723f23ae1d9357629a34b783281"}, - {file = "pymongo-4.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da6eff9c804c1fa178a965228d2c4787cf9a6e333df2cb9fa6beeb80b519845a"}, - {file = "pymongo-4.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4894e933088f2cd43a6891371a3f1c6a16f7acd8b4a736f4034536a7d46d17e5"}, - {file = "pymongo-4.10.0-cp310-cp310-win32.whl", hash = "sha256:5de901d39f0f014e2d54fca03e12e2d5ebbb5067c58577fc71a6caea3b244dbf"}, - {file = "pymongo-4.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:46bc15df824df877efa174f09b71ef87fb3838fe213d4fd95e35be7b2b6b21c7"}, - {file = "pymongo-4.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:f4ebf41c7b219c3a155b675c4d0db66199af1c7c64370f5f9eb06004fd24d9b1"}, - {file = "pymongo-4.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6edb2c9fe5e78af017d1d7c251ed6460fd0764c2a74405c3d783eec92abfdf3d"}, - {file = "pymongo-4.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c62dcdc19af778fb8e7980fcef9e251b3e8a6395d466e0316470df72a7e30339"}, - {file = "pymongo-4.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b234f73a1198f49663bb4848b942bb8c78a31f4fe46cb559081c13662f556e1"}, - {file = "pymongo-4.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51c525337be3fc1d96d585ab981114962ae50b0ad43e8fcb8735d6493d42be0f"}, - {file = "pymongo-4.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b3a2f6acac03ddfe774e3565bf34d752ca85198a5720538492e58b9747f2e2"}, - {file = "pymongo-4.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e86b0cc697b45098467c3c4dba6ec671090106d6ed09822e45cc5f614e7d2d7"}, - {file = "pymongo-4.10.0-cp311-cp311-win32.whl", hash = "sha256:0447e8d6341675d896c10c9f27770011575c659525f058d94451f54beebe85cf"}, - {file = "pymongo-4.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:6128ef22ea99a2c15e7909ef812cfcb1179b8ce858d8050259e5a614aac2d8cd"}, - {file = "pymongo-4.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:43cc579b1aad7381839baa36ec673d21afa7785d10babfb0470d8fadc4334636"}, - {file = "pymongo-4.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be12a4fa20813c04e66e25392fa5c48129fa29f724191148547bfb288fd46b08"}, - {file = "pymongo-4.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f50e85aaa04dcd55846a86512c56c720a386b06067b4ad4ba4ac6eb73a07cc4"}, - {file = "pymongo-4.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:252bf16d45f2049190994a9c17bdf3998ead44f1f4ceb97f6a149c25396ee054"}, - {file = "pymongo-4.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8328927e89d650175bca6974faf8830c08ea507a933b6ca8442cbb4f131c9b5"}, - {file = "pymongo-4.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f12f2a17ef849529a471bf23f7b8553d37197e8b5a671d03d83de97873e9e923"}, - {file = "pymongo-4.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee01c79e7193edd14f9d1f182b5d1a96de709085f946a112a6f652844fe65789"}, - {file = "pymongo-4.10.0-cp312-cp312-win32.whl", hash = "sha256:c880d7333d46801abb5050da80238a838a773bcd95a7045ea9ae8869928f32f0"}, - {file = "pymongo-4.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:4ac85ad92d2f237bd368626bd3794aa1b643da1016defe484b676f1d92e31801"}, - {file = "pymongo-4.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b7385eb45f6a6d3bb78182570d7d9bfecb9d041044dc0a75188937136a01fbf"}, - {file = "pymongo-4.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:864d9e0ff0d525d8bdae1b23e11f5c9a7e625866fb38c0f19e0d3d6800235722"}, - {file = "pymongo-4.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43ca69abb3012b512e9cf17a9da8a7fa5ba6a9a32a3cd00df8daffedeb2b32f6"}, - {file = "pymongo-4.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:889592aded274b970c2980a3460b51a2f9407724c02ebb37df006f7ff9354f9d"}, - {file = "pymongo-4.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96b8d8a070c0bad72e0c075bea1c6715fc26f402ccfb0040a6fdf6a2ae0c582d"}, - {file = "pymongo-4.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f1a4afb651edacb9f8a2da61c56d4b030f30073751b51b9fef66efd1e835ebe"}, - {file = "pymongo-4.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:088afbb90331a4c5d628351245d576bcb7eb2b3ab49eb7f648284de51566b3bb"}, - {file = "pymongo-4.10.0-cp313-cp313-win32.whl", hash = "sha256:33d89ba16e083fa596763b87ef8e54fbc54b7582245b12cfc59af29a4f578505"}, - {file = "pymongo-4.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:5c3da4af3f20c1c4558650b46e2d0c8a261bd825476372c07e72aca6144ee5b0"}, - {file = "pymongo-4.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f38902d3c306795451cd328f9e3f694da05ce95c460273965159b7891f303e71"}, - {file = "pymongo-4.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0c163e9b747ff2a44a3967fe7a1803809d114505447e744f2aba0b910da24ea7"}, - {file = "pymongo-4.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41c9f875ddda4733ec3ba1e1b95aa419fee3b34dd7b14d79677292f5871d0a2b"}, - {file = "pymongo-4.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0fa3cb722d7e59b24f58ddd6c75a9f7f1509edcdd9fee3799805b36eab523bfc"}, - {file = "pymongo-4.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ad5df86de544e44c3a296d0a165ff3d661b9245868e957d6be98340c8b406f7"}, - {file = "pymongo-4.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f102160b1f43085717851988b03a45cce2603a856065b2281f3f529da989668"}, - {file = "pymongo-4.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c1af5c689dd67a9b33e88bc4adb63e61d823108b26d81cbc0ab7373b467a048"}, - {file = "pymongo-4.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:32ebc28f2648b544742c812e26b020ac1b35a5c74be6fc9f96eeaad229713f52"}, - {file = "pymongo-4.10.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:49c09629a9a3218ff176d7cfb7cf2bbb96869be288b2581762f31eae25ae1b30"}, - {file = "pymongo-4.10.0-cp38-cp38-win32.whl", hash = "sha256:c4e270ec6d3210d76c5d1f8642d9f904a02d138ca11937a78cb742b56c14fa7c"}, - {file = 
"pymongo-4.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:7a4c61d613d597205d70f0514ba7a11b466d399cc501b43a42225cad79d4ad46"}, - {file = "pymongo-4.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b4e41fac6874678dd49b29b58ec8c11cfccdee4bd66dc54f96c5cec831b802ae"}, - {file = "pymongo-4.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e668a5d3ec7990276a7bfe5945f754036398088939367f5d0bbf85d0979cfd3"}, - {file = "pymongo-4.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a78aae1fc42bcf690e749fd8d14f1dfc1be664c058e49a05504ed9f4c53ef43"}, - {file = "pymongo-4.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:711bdf49126c5bd78db9db24c3788003f0d22b21ffcdbbee2e099ac2f72d7309"}, - {file = "pymongo-4.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ff01206b0c20ab462889bed31b44c327fbf018343ade8f40aca1093a651ee61"}, - {file = "pymongo-4.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50d52322bb3220f93d05f7e5e94d744b2bc9070fea6231eb3a0c9569064db558"}, - {file = "pymongo-4.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c253ca10460d641461fb302c03fabb30e80567719b739ddf07501ff7cb5508"}, - {file = "pymongo-4.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:71e9b0e623e8395595d359100dfee38d033c7a842db51ff2a13207650336c24a"}, - {file = "pymongo-4.10.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:10438c08eb05208cebb198b740921bfe0eee303e5e7283bc451f7cad76dba4b7"}, - {file = "pymongo-4.10.0-cp39-cp39-win32.whl", hash = "sha256:eec4dd90bd109ce5cc7a9ea0616b7c4211cd1e7a93733d2a8a0809060f1e9950"}, - {file = "pymongo-4.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:ba06fea288b08b9e44d177703f0c96a9fb17ff96e8d444be757bdec030b39f37"}, - {file = "pymongo-4.10.0.tar.gz", hash = "sha256:2b56e499e0066c4a21a26b451b10377f147c360aa318f49f8c640b7f588e8e8c"}, + 
{file = "pymongo-4.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e699aa68c4a7dea2ab5a27067f7d3e08555f8d2c0dc6a0c8c60cfd9ff2e6a4b1"}, + {file = "pymongo-4.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:70645abc714f06b4ad6b72d5bf73792eaad14e3a2cfe29c62a9c81ada69d9e4b"}, + {file = "pymongo-4.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae2fd94c9fe048c94838badcc6e992d033cb9473eb31e5710b3707cba5e8aee2"}, + {file = "pymongo-4.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ded27a4a5374dae03a92e084a60cdbcecd595306555bda553b833baf3fc4868"}, + {file = "pymongo-4.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ecc2455e3974a6c429687b395a0bc59636f2d6aedf5785098cf4e1f180f1c71"}, + {file = "pymongo-4.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920fee41f7d0259f5f72c1f1eb331bc26ffbdc952846f9bd8c3b119013bb52c"}, + {file = "pymongo-4.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0a15665b2d6cf364f4cd114d62452ce01d71abfbd9c564ba8c74dcd7bbd6822"}, + {file = "pymongo-4.10.1-cp310-cp310-win32.whl", hash = "sha256:29e1c323c28a4584b7095378ff046815e39ff82cdb8dc4cc6dfe3acf6f9ad1f8"}, + {file = "pymongo-4.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:88dc4aa45f8744ccfb45164aedb9a4179c93567bbd98a33109d7dc400b00eb08"}, + {file = "pymongo-4.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:57ee6becae534e6d47848c97f6a6dff69e3cce7c70648d6049bd586764febe59"}, + {file = "pymongo-4.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f437a612f4d4f7aca1812311b1e84477145e950fdafe3285b687ab8c52541f3"}, + {file = "pymongo-4.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a970fd3117ab40a4001c3dad333bbf3c43687d90f35287a6237149b5ccae61d"}, + {file = "pymongo-4.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:7c4d0e7cd08ef9f8fbf2d15ba281ed55604368a32752e476250724c3ce36c72e"}, + {file = "pymongo-4.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca6f700cff6833de4872a4e738f43123db34400173558b558ae079b5535857a4"}, + {file = "pymongo-4.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cec237c305fcbeef75c0bcbe9d223d1e22a6e3ba1b53b2f0b79d3d29c742b45b"}, + {file = "pymongo-4.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3337804ea0394a06e916add4e5fac1c89902f1b6f33936074a12505cab4ff05"}, + {file = "pymongo-4.10.1-cp311-cp311-win32.whl", hash = "sha256:778ac646ce6ac1e469664062dfe9ae1f5c9961f7790682809f5ec3b8fda29d65"}, + {file = "pymongo-4.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:9df4ab5594fdd208dcba81be815fa8a8a5d8dedaf3b346cbf8b61c7296246a7a"}, + {file = "pymongo-4.10.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fbedc4617faa0edf423621bb0b3b8707836687161210d470e69a4184be9ca011"}, + {file = "pymongo-4.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7bd26b2aec8ceeb95a5d948d5cc0f62b0eb6d66f3f4230705c1e3d3d2c04ec76"}, + {file = "pymongo-4.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb104c3c2a78d9d85571c8ac90ec4f95bca9b297c6eee5ada71fabf1129e1674"}, + {file = "pymongo-4.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4924355245a9c79f77b5cda2db36e0f75ece5faf9f84d16014c0a297f6d66786"}, + {file = "pymongo-4.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11280809e5dacaef4971113f0b4ff4696ee94cfdb720019ff4fa4f9635138252"}, + {file = "pymongo-4.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5d55f2a82e5eb23795f724991cac2bffbb1c0f219c0ba3bf73a835f97f1bb2e"}, + {file = "pymongo-4.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e974ab16a60be71a8dfad4e5afccf8dd05d41c758060f5d5bda9a758605d9a5d"}, + {file = "pymongo-4.10.1-cp312-cp312-win32.whl", hash = "sha256:544890085d9641f271d4f7a47684450ed4a7344d6b72d5968bfae32203b1bb7c"}, + {file = "pymongo-4.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:dcc07b1277e8b4bf4d7382ca133850e323b7ab048b8353af496d050671c7ac52"}, + {file = "pymongo-4.10.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:90bc6912948dfc8c363f4ead54d54a02a15a7fee6cfafb36dc450fc8962d2cb7"}, + {file = "pymongo-4.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:594dd721b81f301f33e843453638e02d92f63c198358e5a0fa8b8d0b1218dabc"}, + {file = "pymongo-4.10.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0783e0c8e95397c84e9cf8ab092ab1e5dd7c769aec0ef3a5838ae7173b98dea0"}, + {file = "pymongo-4.10.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fb6a72e88df46d1c1040fd32cd2d2c5e58722e5d3e31060a0393f04ad3283de"}, + {file = "pymongo-4.10.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e3a593333e20c87415420a4fb76c00b7aae49b6361d2e2205b6fece0563bf40"}, + {file = "pymongo-4.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72e2ace7456167c71cfeca7dcb47bd5dceda7db2231265b80fc625c5e8073186"}, + {file = "pymongo-4.10.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ad05eb9c97e4f589ed9e74a00fcaac0d443ccd14f38d1258eb4c39a35dd722b"}, + {file = "pymongo-4.10.1-cp313-cp313-win32.whl", hash = "sha256:ee4c86d8e6872a61f7888fc96577b0ea165eb3bdb0d841962b444fa36001e2bb"}, + {file = "pymongo-4.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:45ee87a4e12337353242bc758accc7fb47a2f2d9ecc0382a61e64c8f01e86708"}, + {file = "pymongo-4.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:442ca247f53ad24870a01e80a71cd81b3f2318655fd9d66748ee2bd1b1569d9e"}, + {file = "pymongo-4.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash 
= "sha256:23e1d62df5592518204943b507be7b457fb8a4ad95a349440406fd42db5d0923"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6131bc6568b26e7495a9f3ef2b1700566b76bbecd919f4472bfe90038a61f425"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdeba88c540c9ed0338c0b2062d9f81af42b18d6646b3e6dda05cf6edd46ada9"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15a624d752dd3c89d10deb0ef6431559b6d074703cab90a70bb849ece02adc6b"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba164e73fdade9b4614a2497321c5b7512ddf749ed508950bdecc28d8d76a2d9"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9235fa319993405ae5505bf1333366388add2e06848db7b3deee8f990b69808e"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4a65567bd17d19f03157c7ec992c6530eafd8191a4e5ede25566792c4fe3fa2"}, + {file = "pymongo-4.10.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f1945d48fb9b8a87d515da07f37e5b2c35b364a435f534c122e92747881f4a7c"}, + {file = "pymongo-4.10.1-cp38-cp38-win32.whl", hash = "sha256:345f8d340802ebce509f49d5833cc913da40c82f2e0daf9f60149cacc9ca680f"}, + {file = "pymongo-4.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:3a70d5efdc0387ac8cd50f9a5f379648ecfc322d14ec9e1ba8ec957e5d08c372"}, + {file = "pymongo-4.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15b1492cc5c7cd260229590be7218261e81684b8da6d6de2660cf743445500ce"}, + {file = "pymongo-4.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95207503c41b97e7ecc7e596d84a61f441b4935f11aa8332828a754e7ada8c82"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb99f003c720c6d83be02c8f1a7787c22384a8ca9a4181e406174db47a048619"}, + 
{file = "pymongo-4.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f2bc1ee4b1ca2c4e7e6b7a5e892126335ec8d9215bcd3ac2fe075870fefc3358"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:93a0833c10a967effcd823b4e7445ec491f0bf6da5de0ca33629c0528f42b748"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f56707497323150bd2ed5d63067f4ffce940d0549d4ea2dfae180deec7f9363"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:409ab7d6c4223e5c85881697f365239dd3ed1b58f28e4124b846d9d488c86880"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dac78a650dc0637d610905fd06b5fa6419ae9028cf4d04d6a2657bc18a66bbce"}, + {file = "pymongo-4.10.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1ec3fa88b541e0481aff3c35194c9fac96e4d57ec5d1c122376000eb28c01431"}, + {file = "pymongo-4.10.1-cp39-cp39-win32.whl", hash = "sha256:e0e961923a7b8a1c801c43552dcb8153e45afa41749d9efbd3a6d33f45489f7a"}, + {file = "pymongo-4.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:dabe8bf1ad644e6b93f3acf90ff18536d94538ca4d27e583c6db49889e98e48f"}, + {file = "pymongo-4.10.1.tar.gz", hash = "sha256:a9de02be53b6bb98efe0b9eda84ffa1ec027fcb23a2de62c4f941d9a2f2f3330"}, ] [package.dependencies] @@ -1011,25 +882,29 @@ cli = ["click (>=5.0)"] [[package]] name = "pywin32" -version = "306" +version = "307" description = "Python for Window Extensions" optional = false python-versions = "*" files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = 
"sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, + {file = "pywin32-307-cp310-cp310-win32.whl", hash = "sha256:f8f25d893c1e1ce2d685ef6d0a481e87c6f510d0f3f117932781f412e0eba31b"}, + {file = "pywin32-307-cp310-cp310-win_amd64.whl", hash = "sha256:36e650c5e5e6b29b5d317385b02d20803ddbac5d1031e1f88d20d76676dd103d"}, + {file = "pywin32-307-cp310-cp310-win_arm64.whl", hash = "sha256:0c12d61e0274e0c62acee79e3e503c312426ddd0e8d4899c626cddc1cafe0ff4"}, + {file = "pywin32-307-cp311-cp311-win32.whl", hash = 
"sha256:fec5d27cc893178fab299de911b8e4d12c5954e1baf83e8a664311e56a272b75"}, + {file = "pywin32-307-cp311-cp311-win_amd64.whl", hash = "sha256:987a86971753ed7fdd52a7fb5747aba955b2c7fbbc3d8b76ec850358c1cc28c3"}, + {file = "pywin32-307-cp311-cp311-win_arm64.whl", hash = "sha256:fd436897c186a2e693cd0437386ed79f989f4d13d6f353f8787ecbb0ae719398"}, + {file = "pywin32-307-cp312-cp312-win32.whl", hash = "sha256:07649ec6b01712f36debf39fc94f3d696a46579e852f60157a729ac039df0815"}, + {file = "pywin32-307-cp312-cp312-win_amd64.whl", hash = "sha256:00d047992bb5dcf79f8b9b7c81f72e0130f9fe4b22df613f755ab1cc021d8347"}, + {file = "pywin32-307-cp312-cp312-win_arm64.whl", hash = "sha256:b53658acbfc6a8241d72cc09e9d1d666be4e6c99376bc59e26cdb6223c4554d2"}, + {file = "pywin32-307-cp313-cp313-win32.whl", hash = "sha256:ea4d56e48dc1ab2aa0a5e3c0741ad6e926529510516db7a3b6981a1ae74405e5"}, + {file = "pywin32-307-cp313-cp313-win_amd64.whl", hash = "sha256:576d09813eaf4c8168d0bfd66fb7cb3b15a61041cf41598c2db4a4583bf832d2"}, + {file = "pywin32-307-cp313-cp313-win_arm64.whl", hash = "sha256:b30c9bdbffda6a260beb2919f918daced23d32c79109412c2085cbc513338a0a"}, + {file = "pywin32-307-cp37-cp37m-win32.whl", hash = "sha256:5101472f5180c647d4525a0ed289ec723a26231550dbfd369ec19d5faf60e511"}, + {file = "pywin32-307-cp37-cp37m-win_amd64.whl", hash = "sha256:05de55a7c110478dc4b202230e98af5e0720855360d2b31a44bb4e296d795fba"}, + {file = "pywin32-307-cp38-cp38-win32.whl", hash = "sha256:13d059fb7f10792542082f5731d5d3d9645320fc38814759313e5ee97c3fac01"}, + {file = "pywin32-307-cp38-cp38-win_amd64.whl", hash = "sha256:7e0b2f93769d450a98ac7a31a087e07b126b6d571e8b4386a5762eb85325270b"}, + {file = "pywin32-307-cp39-cp39-win32.whl", hash = "sha256:55ee87f2f8c294e72ad9d4261ca423022310a6e79fb314a8ca76ab3f493854c6"}, + {file = "pywin32-307-cp39-cp39-win_amd64.whl", hash = "sha256:e9d5202922e74985b037c9ef46778335c102b74b95cec70f629453dbe7235d87"}, ] [[package]] @@ -1117,29 +992,29 @@ use-chardet-on-py3 = ["chardet 
(>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.6.8" +version = "0.6.9" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.6.8-py3-none-linux_armv6l.whl", hash = "sha256:77944bca110ff0a43b768f05a529fecd0706aac7bcce36d7f1eeb4cbfca5f0f2"}, - {file = "ruff-0.6.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27b87e1801e786cd6ede4ada3faa5e254ce774de835e6723fd94551464c56b8c"}, - {file = "ruff-0.6.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cd48f945da2a6334f1793d7f701725a76ba93bf3d73c36f6b21fb04d5338dcf5"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:677e03c00f37c66cea033274295a983c7c546edea5043d0c798833adf4cf4c6f"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f1476236b3eacfacfc0f66aa9e6cd39f2a624cb73ea99189556015f27c0bdeb"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f5a2f17c7d32991169195d52a04c95b256378bbf0de8cb98478351eb70d526f"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5fd0d4b7b1457c49e435ee1e437900ced9b35cb8dc5178921dfb7d98d65a08d0"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8034b19b993e9601f2ddf2c517451e17a6ab5cdb1c13fdff50c1442a7171d87"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6cfb227b932ba8ef6e56c9f875d987973cd5e35bc5d05f5abf045af78ad8e098"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef0411eccfc3909269fed47c61ffebdcb84a04504bafa6b6df9b85c27e813b0"}, - {file = "ruff-0.6.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:007dee844738c3d2e6c24ab5bc7d43c99ba3e1943bd2d95d598582e9c1b27750"}, - {file = "ruff-0.6.8-py3-none-musllinux_1_2_armv7l.whl", hash = 
"sha256:ce60058d3cdd8490e5e5471ef086b3f1e90ab872b548814e35930e21d848c9ce"}, - {file = "ruff-0.6.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1085c455d1b3fdb8021ad534379c60353b81ba079712bce7a900e834859182fa"}, - {file = "ruff-0.6.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:70edf6a93b19481affd287d696d9e311388d808671bc209fb8907b46a8c3af44"}, - {file = "ruff-0.6.8-py3-none-win32.whl", hash = "sha256:792213f7be25316f9b46b854df80a77e0da87ec66691e8f012f887b4a671ab5a"}, - {file = "ruff-0.6.8-py3-none-win_amd64.whl", hash = "sha256:ec0517dc0f37cad14a5319ba7bba6e7e339d03fbf967a6d69b0907d61be7a263"}, - {file = "ruff-0.6.8-py3-none-win_arm64.whl", hash = "sha256:8d3bb2e3fbb9875172119021a13eed38849e762499e3cfde9588e4b4d70968dc"}, - {file = "ruff-0.6.8.tar.gz", hash = "sha256:a5bf44b1aa0adaf6d9d20f86162b34f7c593bfedabc51239953e446aefc8ce18"}, + {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"}, + {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, + {file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039"}, + {file = "ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d"}, + {file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"}, + {file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"}, + {file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"}, ] [[package]] @@ -1210,13 +1085,13 @@ files = [ [[package]] name = "tomli" -version = "2.0.1" +version = "2.0.2" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.0.2-py3-none-any.whl", hash = 
"sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] [[package]] @@ -1388,4 +1263,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "d0d2f63979d3bd7f470305617e8d5e9912c04dd2f5673fcf1470c7d439699b12" +content-hash = "2b8f685671cd13aa78a1ea410b5a25a6f5284786cf55e3c802d4e2c11f8600d8" diff --git a/pyproject.toml b/pyproject.toml index 90723269..c3bc8413 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires-python = ">=3.8" [tool.poetry] name = "astrapy" -version = "1.5.0" +version = "1.5.1" description = "AstraPy is a Pythonic SDK for DataStax Astra and its Data API" authors = [ "Stefano Lottini ", @@ -31,7 +31,6 @@ classifiers = [ [tool.poetry.dependencies] python = "^3.8.0" -cassio = "~0.1.4" deprecation = "~2.1.0" toml = "^0.10.2" uuid6 = ">=2024.1.12" diff --git a/tests/idiomatic/integration/test_ddl_sync.py b/tests/idiomatic/integration/test_ddl_sync.py index b62a83e5..1e199bf4 100644 --- a/tests/idiomatic/integration/test_ddl_sync.py +++ b/tests/idiomatic/integration/test_ddl_sync.py @@ -19,7 +19,7 @@ import pytest from astrapy import Collection, DataAPIClient, Database -from astrapy.admin import AstraDBDatabaseAdmin, parse_api_endpoint +from astrapy.admin import parse_api_endpoint from astrapy.constants import DefaultIdType, VectorMetric from astrapy.ids import UUID, ObjectId from astrapy.info import CollectionDescriptor, DatabaseInfo @@ -405,17 +405,6 @@ def test_autoregion_getdatabase_sync( data_api_credentials_kwargs["api_endpoint"], keyspace="the_ks" ) - # auto-region for the init of AstraDBDatabaseAdmin - assert AstraDBDatabaseAdmin( - data_api_credentials_kwargs["api_endpoint"], - token=data_api_credentials_kwargs["token"], - environment=data_api_credentials_info["environment"], - ) == AstraDBDatabaseAdmin( - parsed_api_endpoint.database_id, - 
token=data_api_credentials_kwargs["token"], - environment=data_api_credentials_info["environment"], - ) - @pytest.mark.skipif(not IS_ASTRA_DB, reason="Not supported outside of Astra DB") @pytest.mark.describe( "test database-from-admin default keyspace per environment, sync" diff --git a/tests/idiomatic/integration/test_dml_async.py b/tests/idiomatic/integration/test_dml_async.py index 46a1bb3b..be856eae 100644 --- a/tests/idiomatic/integration/test_dml_async.py +++ b/tests/idiomatic/integration/test_dml_async.py @@ -460,12 +460,22 @@ async def test_collection_cursors_async( # projection cursor0 = async_empty_collection.find(projection={"ternary": False}) + assert cursor0.consumed == 0 + with pytest.warns(DeprecationWarning): + assert cursor0.retrieved == 0 document0 = await cursor0.__anext__() + assert cursor0.consumed == 1 + with pytest.warns(DeprecationWarning): + assert cursor0.retrieved == 1 assert "ternary" not in document0 cursor0b = async_empty_collection.find(projection={"ternary": True}) document0b = await cursor0b.__anext__() assert "ternary" in document0b + assert cursor0b.data_source == async_empty_collection + with pytest.warns(DeprecationWarning): + assert cursor0b.collection == async_empty_collection + async def _alist(acursor: AsyncCursor) -> list[DocumentType]: return [doc async for doc in acursor] diff --git a/tests/idiomatic/integration/test_dml_sync.py b/tests/idiomatic/integration/test_dml_sync.py index fa85feb1..40dc1801 100644 --- a/tests/idiomatic/integration/test_dml_sync.py +++ b/tests/idiomatic/integration/test_dml_sync.py @@ -397,12 +397,22 @@ def test_collection_cursors_sync( # projection cursor0 = sync_empty_collection.find(projection={"ternary": False}) + assert cursor0.consumed == 0 + with pytest.warns(DeprecationWarning): + assert cursor0.retrieved == 0 document0 = cursor0.__next__() + assert cursor0.consumed == 1 + with pytest.warns(DeprecationWarning): + assert cursor0.retrieved == 1 assert "ternary" not in document0 cursor0b = 
sync_empty_collection.find(projection={"ternary": True}) document0b = cursor0b.__next__() assert "ternary" in document0b + assert cursor0b.data_source == sync_empty_collection + with pytest.warns(DeprecationWarning): + assert cursor0b.collection == sync_empty_collection + # rewinding, slicing and retrieved cursor1 = sync_empty_collection.find(sort={"seq": 1}) cursor1.__next__() diff --git a/tests/idiomatic/integration/test_exceptions_async.py b/tests/idiomatic/integration/test_exceptions_async.py index 6477f024..65b36b79 100644 --- a/tests/idiomatic/integration/test_exceptions_async.py +++ b/tests/idiomatic/integration/test_exceptions_async.py @@ -18,7 +18,7 @@ from astrapy import AsyncCollection, AsyncDatabase from astrapy.constants import DocumentType -from astrapy.cursors import AsyncCursor +from astrapy.cursors import AsyncCursor, CursorState from astrapy.exceptions import ( BulkWriteException, CollectionAlreadyExistsException, @@ -357,12 +357,12 @@ async def test_cursor_custom_exceptions_async( await cur1.__anext__() with pytest.raises(CursorIsStartedException) as exc: cur1.limit(1) - assert exc.value.cursor_state == "running" + assert exc.value.cursor_state == CursorState.STARTED.value [doc async for doc in cur1] with pytest.raises(CursorIsStartedException) as exc: cur1.limit(1) - assert exc.value.cursor_state == "exhausted" + assert exc.value.cursor_state == CursorState.CLOSED.value @pytest.mark.describe("test of standard exceptions in cursors, async") async def test_cursor_standard_exceptions_async( diff --git a/tests/idiomatic/integration/test_exceptions_sync.py b/tests/idiomatic/integration/test_exceptions_sync.py index ceee5ed7..bdfb10b8 100644 --- a/tests/idiomatic/integration/test_exceptions_sync.py +++ b/tests/idiomatic/integration/test_exceptions_sync.py @@ -17,6 +17,7 @@ import pytest from astrapy import Collection, Database +from astrapy.cursors import CursorState from astrapy.exceptions import ( BulkWriteException, 
CollectionAlreadyExistsException, @@ -345,12 +346,12 @@ def test_cursor_custom_exceptions_sync( cur1.__next__() with pytest.raises(CursorIsStartedException) as exc: cur1.limit(1) - assert exc.value.cursor_state == "running" + assert exc.value.cursor_state == CursorState.STARTED.value list(cur1) with pytest.raises(CursorIsStartedException) as exc: cur1.limit(1) - assert exc.value.cursor_state == "exhausted" + assert exc.value.cursor_state == CursorState.CLOSED.value @pytest.mark.describe("test of standard exceptions in cursors, sync") def test_cursor_standard_exceptions_sync( diff --git a/tests/idiomatic/unit/test_admin_conversions.py b/tests/idiomatic/unit/test_admin_conversions.py index 3c692aed..3f6087e1 100644 --- a/tests/idiomatic/unit/test_admin_conversions.py +++ b/tests/idiomatic/unit/test_admin_conversions.py @@ -28,44 +28,113 @@ from astrapy.authentication import StaticTokenProvider, UsernamePasswordTokenProvider from astrapy.constants import Environment +from ..conftest import sync_fail_if_not_removed + +api_ep0123_dev = ( + "https://01234567-89ab-cdef-0123-456789abcdef-region.apps.astra-dev.datastax.com" +) +api_ep7777_dev = ( + "https://77777777-89ab-cdef-0123-456789abcdef-region.apps.astra-dev.datastax.com" +) +api_ep9999_test = ( + "https://99999999-89ab-cdef-0123-456789abcdef-region.apps.astra-test.datastax.com" +) + class TestAdminConversions: + @sync_fail_if_not_removed + @pytest.mark.describe( + "test of DataAPIClient set_caller and caller_name in conversions" + ) + def test_dataapiclient_deprecated_caller_in_conversions(self) -> None: + with pytest.warns(DeprecationWarning): + dac1 = DataAPIClient( + "t1", environment="dev", caller_name="cn", caller_version="cv" + ) + assert dac1 == DataAPIClient("t1", environment="dev", callers=[("cn", "cv")]) + with pytest.raises(ValueError, match="`caller_name` and `caller_version`"): + DataAPIClient( + "t1", + environment="dev", + callers=[("cn", "cv")], + caller_name="cn", + ) + with 
pytest.raises(ValueError, match="`caller_name` and `caller_version`"): + DataAPIClient( + "t1", + environment="dev", + callers=[("cn", "cv")], + caller_version="cv", + ) + dac2 = DataAPIClient( + "t1", + environment="dev", + callers=[("cn", "cv")], + ) + assert dac1 == dac2 + + with pytest.warns(DeprecationWarning): + assert dac1 != dac1._copy(caller_name="x", caller_version="x") + with pytest.warns(DeprecationWarning): + assert dac1 != dac1._copy(caller_name="x") + with pytest.warns(DeprecationWarning): + assert dac1 != dac1._copy(caller_version="x") + + with pytest.warns(DeprecationWarning): + assert dac1 == dac1._copy(caller_name="x", caller_version="x")._copy( + caller_name="cn", caller_version="cv" + ) + + with pytest.warns(DeprecationWarning): + assert dac1 != dac1.with_options(caller_name="x", caller_version="x") + with pytest.warns(DeprecationWarning): + assert dac1 != dac1.with_options(caller_name="x") + with pytest.warns(DeprecationWarning): + assert dac1 != dac1.with_options(caller_version="x") + + with pytest.warns(DeprecationWarning): + assert dac1 == dac1.with_options( + caller_name="x", caller_version="x" + ).with_options(caller_name="cn", caller_version="cv") + + dac1b = dac1._copy() + dac1b.set_caller("cn2", "cv2") + assert dac1b != dac1 + dac1b.set_caller("cn", "cv") + assert dac1b == dac1 + @pytest.mark.describe("test of DataAPIClient conversions and comparison functions") def test_dataapiclient_conversions(self) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] dac1 = DataAPIClient( - "t1", environment="dev", caller_name="cn", caller_version="cv" + "t1", + environment="dev", + callers=callers0, ) dac2 = DataAPIClient( - "t1", environment="dev", caller_name="cn", caller_version="cv" + "t1", + environment="dev", + callers=callers0, ) assert dac1 == dac2 assert dac1 != dac1._copy(token="x") assert dac1 != dac1._copy(environment="test") - assert dac1 != dac1._copy(caller_name="x") - assert dac1 != 
dac1._copy(caller_version="x") + assert dac1 != dac1._copy(callers=callers1) assert dac1 == dac1._copy(token="x")._copy(token="t1") assert dac1 == dac1._copy(environment="test")._copy(environment="dev") - assert dac1 == dac1._copy(caller_name="x")._copy(caller_name="cn") - assert dac1 == dac1._copy(caller_version="x")._copy(caller_version="cv") + assert dac1 == dac1._copy(callers=callers1)._copy(callers=callers0) assert dac1 != dac1.with_options(token="x") - assert dac1 != dac1.with_options(caller_name="x") - assert dac1 != dac1.with_options(caller_version="x") + assert dac1 != dac1.with_options(callers=callers1) assert dac1 == dac1.with_options(token="x").with_options(token="t1") - assert dac1 == dac1.with_options(caller_name="x").with_options(caller_name="cn") - assert dac1 == dac1.with_options(caller_version="x").with_options( - caller_version="cv" + assert dac1 == dac1.with_options(callers=callers1).with_options( + callers=callers0 ) - dac1b = dac1._copy() - dac1b.set_caller("cn2", "cv2") - assert dac1b != dac1 - dac1b.set_caller("cn", "cv") - assert dac1b == dac1 - a_e_string = ( "https://01234567-89ab-cdef-0123-456789abcdef-us-east1" ".apps.astra-dev.datastax.com" @@ -75,8 +144,7 @@ def test_dataapiclient_conversions(self) -> None: expected_db_1 = Database( api_endpoint=a_e_string, token="t1", - caller_name="cn", - caller_version="cv", + callers=callers0, ) assert db1 == expected_db_1 with pytest.raises(httpx.HTTPStatusError): @@ -94,12 +162,12 @@ def test_dataapiclient_spawning_databases(self) -> None: client = DataAPIClient( token=token, environment=Environment.PROD, - caller_name="cn", - caller_version="cv", + callers=[("cn", "cv")], ) db1 = client.get_database(endpoint) - db2 = client.get_database(database_id, region=database_region) + with pytest.warns(DeprecationWarning): + db2 = client.get_database(database_id, region=database_region) db3 = client.get_database(endpoint) assert db1 == db2 @@ -108,21 +176,76 @@ def 
test_dataapiclient_spawning_databases(self) -> None: with pytest.raises(ValueError): client.get_database(endpoint, region=database_region) + @sync_fail_if_not_removed + @pytest.mark.describe( + "test of AstraDBAdmin set_caller and caller_name in conversions" + ) + def test_astradbadmin_deprecated_caller_in_conversions(self) -> None: + with pytest.warns(DeprecationWarning): + adm1 = AstraDBAdmin( + "t1", + environment="dev", + caller_name="cn", + caller_version="cv", + dev_ops_url="dou", + dev_ops_api_version="dvv", + ) + with pytest.warns(DeprecationWarning): + adm2 = AstraDBAdmin( + "t1", + environment="dev", + caller_name="cn", + caller_version="cv", + dev_ops_url="dou", + dev_ops_api_version="dvv", + ) + assert adm1 == adm2 + + with pytest.warns(DeprecationWarning): + assert adm1 != adm1._copy(caller_name="x", caller_version="x") + with pytest.warns(DeprecationWarning): + assert adm1 != adm1._copy(caller_version="x") + with pytest.warns(DeprecationWarning): + assert adm1 != adm1._copy(caller_name="x") + + with pytest.warns(DeprecationWarning): + assert adm1 == adm1._copy(caller_name="x", caller_version="x")._copy( + caller_name="cn", caller_version="cv" + ) + + with pytest.warns(DeprecationWarning): + assert adm1 != adm1.with_options(caller_name="x", caller_version="x") + with pytest.warns(DeprecationWarning): + assert adm1 != adm1.with_options(caller_name="x") + with pytest.warns(DeprecationWarning): + assert adm1 != adm1.with_options(caller_version="x") + + with pytest.warns(DeprecationWarning): + assert adm1 == adm1.with_options( + caller_name="x", caller_version="x" + ).with_options(caller_name="cn", caller_version="cv") + + adm1b = adm1._copy() + adm1b.set_caller("cn2", "cv2") + assert adm1b != adm1 + adm1b.set_caller("cn", "cv") + assert adm1b == adm1 + @pytest.mark.describe("test of AstraDBAdmin conversions and comparison functions") def test_astradbadmin_conversions(self) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] adm1 = 
AstraDBAdmin( "t1", environment="dev", - caller_name="cn", - caller_version="cv", + callers=callers0, dev_ops_url="dou", dev_ops_api_version="dvv", ) adm2 = AstraDBAdmin( "t1", environment="dev", - caller_name="cn", - caller_version="cv", + callers=callers0, dev_ops_url="dou", dev_ops_api_version="dvv", ) @@ -130,59 +253,108 @@ def test_astradbadmin_conversions(self) -> None: assert adm1 != adm1._copy(token="x") assert adm1 != adm1._copy(environment="test") - assert adm1 != adm1._copy(caller_name="x") - assert adm1 != adm1._copy(caller_version="x") + assert adm1 != adm1._copy(callers=callers1) assert adm1 != adm1._copy(dev_ops_url="x") assert adm1 != adm1._copy(dev_ops_api_version="x") assert adm1 == adm1._copy(token="x")._copy(token="t1") assert adm1 == adm1._copy(environment="test")._copy(environment="dev") - assert adm1 == adm1._copy(caller_name="x")._copy(caller_name="cn") - assert adm1 == adm1._copy(caller_version="x")._copy(caller_version="cv") + assert adm1 == adm1._copy(callers=callers1)._copy(callers=callers0) assert adm1 == adm1._copy(dev_ops_url="x")._copy(dev_ops_url="dou") assert adm1 == adm1._copy(dev_ops_api_version="x")._copy( dev_ops_api_version="dvv" ) assert adm1 != adm1.with_options(token="x") - assert adm1 != adm1.with_options(caller_name="x") - assert adm1 != adm1.with_options(caller_version="x") + assert adm1 != adm1.with_options(callers=callers1) assert adm1 == adm1.with_options(token="x").with_options(token="t1") - assert adm1 == adm1.with_options(caller_name="x").with_options(caller_name="cn") - assert adm1 == adm1.with_options(caller_version="x").with_options( - caller_version="cv" + assert adm1 == adm1.with_options(callers=callers1).with_options( + callers=callers0 ) - adm1b = adm1._copy() - adm1b.set_caller("cn2", "cv2") - assert adm1b != adm1 - adm1b.set_caller("cn", "cv") - assert adm1b == adm1 + @sync_fail_if_not_removed + @pytest.mark.describe( + "test of AstraDBDatabaseAdmin set_caller and caller_name in conversions" + ) + def 
test_astradbdatabaseadmin_deprecated_caller_in_conversions(self) -> None: + with pytest.warns(DeprecationWarning): + adda1 = AstraDBDatabaseAdmin( + api_ep0123_dev, + token="t1", + environment="dev", + caller_name="cn", + caller_version="cv", + dev_ops_url="dou", + dev_ops_api_version="dvv", + api_path="appi", + api_version="vX", + ) + with pytest.warns(DeprecationWarning): + adda2 = AstraDBDatabaseAdmin( + api_ep0123_dev, + token="t1", + environment="dev", + caller_name="cn", + caller_version="cv", + dev_ops_url="dou", + dev_ops_api_version="dvv", + api_path="appi", + api_version="vX", + ) + assert adda1 == adda2 + + with pytest.warns(DeprecationWarning): + assert adda1 != adda1._copy(caller_name="x", caller_version="x") + with pytest.warns(DeprecationWarning): + assert adda1 != adda1._copy(caller_name="x") + with pytest.warns(DeprecationWarning): + assert adda1 != adda1._copy(caller_version="x") + + with pytest.warns(DeprecationWarning): + assert adda1 == adda1._copy(caller_name="x", caller_version="x")._copy( + caller_name="cn", caller_version="cv" + ) + + with pytest.warns(DeprecationWarning): + assert adda1 != adda1.with_options(caller_name="x", caller_version="x") + with pytest.warns(DeprecationWarning): + assert adda1 != adda1.with_options(caller_name="x") + with pytest.warns(DeprecationWarning): + assert adda1 != adda1.with_options(caller_version="x") + + with pytest.warns(DeprecationWarning): + assert adda1 == adda1.with_options( + caller_name="x", caller_version="x" + ).with_options(caller_name="cn", caller_version="cv") + + adda1b = adda1._copy() + adda1b.set_caller("cn2", "cv2") + assert adda1b != adda1 + adda1b.set_caller("cn", "cv") + assert adda1b == adda1 @pytest.mark.describe( "test of AstraDBDatabaseAdmin conversions and comparison functions" ) def test_astradbdatabaseadmin_conversions(self) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] adda1 = AstraDBDatabaseAdmin( - "01234567-89ab-cdef-0123-456789abcdef", + 
api_ep0123_dev, token="t1", - region="reg", environment="dev", - caller_name="cn", - caller_version="cv", + callers=callers0, dev_ops_url="dou", dev_ops_api_version="dvv", api_path="appi", api_version="vX", ) adda2 = AstraDBDatabaseAdmin( - "01234567-89ab-cdef-0123-456789abcdef", + api_ep0123_dev, token="t1", - region="reg", environment="dev", - caller_name="cn", - caller_version="cv", + callers=callers0, dev_ops_url="dou", dev_ops_api_version="dvv", api_path="appi", @@ -190,25 +362,19 @@ def test_astradbdatabaseadmin_conversions(self) -> None: ) assert adda1 == adda2 - assert adda1 != adda1._copy(id="99999999-89ab-cdef-0123-456789abcdef") + assert adda1 != adda1._copy(api_ep9999_test, environment="test") assert adda1 != adda1._copy(token="x") - assert adda1 != adda1._copy(region="x") - assert adda1 != adda1._copy(environment="test") - assert adda1 != adda1._copy(caller_name="x") - assert adda1 != adda1._copy(caller_version="x") + assert adda1 != adda1._copy(callers=callers1) assert adda1 != adda1._copy(dev_ops_url="x") assert adda1 != adda1._copy(dev_ops_api_version="x") assert adda1 != adda1._copy(api_path="x") assert adda1 != adda1._copy(api_version="x") - assert adda1 == adda1._copy(id="99999999-89ab-cdef-0123-456789abcdef")._copy( - id="01234567-89ab-cdef-0123-456789abcdef" - ) assert adda1 == adda1._copy(token="x")._copy(token="t1") - assert adda1 == adda1._copy(region="x")._copy(region="reg") - assert adda1 == adda1._copy(environment="test")._copy(environment="dev") - assert adda1 == adda1._copy(caller_name="x")._copy(caller_name="cn") - assert adda1 == adda1._copy(caller_version="x")._copy(caller_version="cv") + assert adda1 == adda1._copy(api_ep9999_test, environment="test")._copy( + api_ep0123_dev, environment="dev" + ) + assert adda1 == adda1._copy(callers=callers1)._copy(callers=callers0) assert adda1 == adda1._copy(dev_ops_url="x")._copy(dev_ops_url="dou") assert adda1 == adda1._copy(dev_ops_api_version="x")._copy( dev_ops_api_version="dvv" @@ -216,40 
+382,86 @@ def test_astradbdatabaseadmin_conversions(self) -> None: assert adda1 == adda1._copy(api_path="x")._copy(api_path="appi") assert adda1 == adda1._copy(api_version="x")._copy(api_version="vX") - assert adda1 != adda1.with_options(id="99999999-89ab-cdef-0123-456789abcdef") + assert adda1 != adda1.with_options(api_ep7777_dev) assert adda1 != adda1.with_options(token="x") - assert adda1 != adda1.with_options(caller_name="x") - assert adda1 != adda1.with_options(caller_version="x") + assert adda1 != adda1.with_options(callers=callers1) - assert adda1 == adda1.with_options( - id="99999999-89ab-cdef-0123-456789abcdef" - ).with_options(id="01234567-89ab-cdef-0123-456789abcdef") + assert adda1 == adda1.with_options(api_ep7777_dev).with_options(api_ep0123_dev) assert adda1 == adda1.with_options(token="x").with_options(token="t1") - assert adda1 == adda1.with_options(caller_name="x").with_options( - caller_name="cn" - ) - assert adda1 == adda1.with_options(caller_version="x").with_options( - caller_version="cv" + assert adda1 == adda1.with_options(callers=callers1).with_options( + callers=callers0 ) - adda1b = adda1._copy() - adda1b.set_caller("cn2", "cv2") - assert adda1b != adda1 - adda1b.set_caller("cn", "cv") - assert adda1b == adda1 + @sync_fail_if_not_removed + @pytest.mark.describe( + "test of DataAPIDBDatabaseAdmin set_caller and caller_name in conversions" + ) + def test_dataapidatabaseadmin_deprecated_caller_in_conversions(self) -> None: + with pytest.warns(DeprecationWarning): + dada1 = DataAPIDatabaseAdmin( + "http://a.b.c:1234", + token="t1", + environment="hcd", + api_path="appi", + api_version="v9", + caller_name="cn", + caller_version="cv", + ) + with pytest.warns(DeprecationWarning): + dada2 = DataAPIDatabaseAdmin( + "http://a.b.c:1234", + token="t1", + environment="hcd", + api_path="appi", + api_version="v9", + caller_name="cn", + caller_version="cv", + ) + assert dada1 == dada2 + + with pytest.warns(DeprecationWarning): + assert dada1 != 
dada1._copy(caller_name="x", caller_version="x") + with pytest.warns(DeprecationWarning): + assert dada1 != dada1._copy(caller_name="x") + with pytest.warns(DeprecationWarning): + assert dada1 != dada1._copy(caller_version="x") + + with pytest.warns(DeprecationWarning): + assert dada1 == dada1._copy(caller_name="x", caller_version="x")._copy( + caller_name="cn", caller_version="cv" + ) + + with pytest.warns(DeprecationWarning): + assert dada1 != dada1.with_options(caller_name="x", caller_version="x") + with pytest.warns(DeprecationWarning): + assert dada1 != dada1.with_options(caller_name="x") + with pytest.warns(DeprecationWarning): + assert dada1 != dada1.with_options(caller_version="x") + + with pytest.warns(DeprecationWarning): + assert dada1 == dada1.with_options( + caller_name="x", caller_version="x" + ).with_options(caller_name="cn", caller_version="cv") + + dada1b = dada1._copy() + dada1b.set_caller("cn2", "cv2") + assert dada1b != dada1 + dada1b.set_caller("cn", "cv") + assert dada1b == dada1 @pytest.mark.describe( "test of DataAPIDBDatabaseAdmin conversions and comparison functions" ) def test_dataapidatabaseadmin_conversions(self) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] dada1 = DataAPIDatabaseAdmin( "http://a.b.c:1234", token="t1", environment="hcd", api_path="appi", api_version="v9", - caller_name="cn", - caller_version="cv", + callers=callers0, ) dada2 = DataAPIDatabaseAdmin( "http://a.b.c:1234", @@ -257,18 +469,16 @@ def test_dataapidatabaseadmin_conversions(self) -> None: environment="hcd", api_path="appi", api_version="v9", - caller_name="cn", - caller_version="cv", + callers=callers0, ) assert dada1 == dada2 assert dada1 != dada1._copy(api_endpoint="https://x.y.z:9876") - assert dada1 != dada1._copy(token="https://x.y.z:9876") - assert dada1 != dada1._copy(environment="https://x.y.z:9876") - assert dada1 != dada1._copy(api_path="https://x.y.z:9876") - assert dada1 != dada1._copy(api_version="https://x.y.z:9876") 
- assert dada1 != dada1._copy(caller_name="https://x.y.z:9876") - assert dada1 != dada1._copy(caller_version="https://x.y.z:9876") + assert dada1 != dada1._copy(token="tx") + assert dada1 != dada1._copy(environment="en") + assert dada1 != dada1._copy(api_path="ap") + assert dada1 != dada1._copy(api_version="av") + assert dada1 != dada1._copy(callers=callers1) assert dada1 == dada1._copy(api_endpoint="x")._copy( api_endpoint="http://a.b.c:1234" @@ -277,31 +487,20 @@ def test_dataapidatabaseadmin_conversions(self) -> None: assert dada1 == dada1._copy(environment="x")._copy(environment="hcd") assert dada1 == dada1._copy(api_path="x")._copy(api_path="appi") assert dada1 == dada1._copy(api_version="x")._copy(api_version="v9") - assert dada1 == dada1._copy(caller_name="x")._copy(caller_name="cn") - assert dada1 == dada1._copy(caller_version="x")._copy(caller_version="cv") + assert dada1 == dada1._copy(callers=callers1)._copy(callers=callers0) assert dada1 != dada1.with_options(api_endpoint="https://x.y.z:9876") assert dada1 != dada1.with_options(token="x") - assert dada1 != dada1.with_options(caller_name="x") - assert dada1 != dada1.with_options(caller_version="x") + assert dada1 != dada1.with_options(callers=callers1) assert dada1 == dada1.with_options( api_endpoint="https://x.y.z:9876" ).with_options(api_endpoint="http://a.b.c:1234") assert dada1 == dada1.with_options(token="x").with_options(token="t1") - assert dada1 == dada1.with_options(caller_name="x").with_options( - caller_name="cn" - ) - assert dada1 == dada1.with_options(caller_version="x").with_options( - caller_version="cv" + assert dada1 == dada1.with_options(callers=callers1).with_options( + callers=callers0 ) - dada1b = dada1._copy() - dada1b.set_caller("cn2", "cv2") - assert dada1b != dada1 - dada1b.set_caller("cn", "cv") - assert dada1b == dada1 - @pytest.mark.describe("test of token inheritance in spawning from DataAPIClient") def test_dataapiclient_token_inheritance(self) -> None: client_t = 
DataAPIClient(token=StaticTokenProvider("static")) @@ -349,22 +548,30 @@ def test_astradbadmin_token_inheritance(self) -> None: "test of token inheritance in spawning from AstraDBDatabaseAdmin" ) def test_astradbdatabaseadmin_token_inheritance(self) -> None: - db_id_string = "01234567-89ab-cdef-0123-456789abcdef" adbadmin_t = AstraDBDatabaseAdmin( - db_id_string, + api_ep0123_dev, token=StaticTokenProvider("static"), - region="reg", + environment=Environment.DEV, + ) + adbadmin_0 = AstraDBDatabaseAdmin( + api_ep0123_dev, + environment=Environment.DEV, ) - adbadmin_0 = AstraDBDatabaseAdmin(db_id_string, region="reg") token_f = UsernamePasswordTokenProvider(username="u", password="p") - adbadmin_f = AstraDBDatabaseAdmin(db_id_string, region="reg", token=token_f) + adbadmin_f = AstraDBDatabaseAdmin( + api_ep0123_dev, + token=token_f, + environment=Environment.DEV, + ) assert adbadmin_t.get_database( - token=token_f, keyspace="n", region="r" - ) == adbadmin_f.get_database(keyspace="n", region="r") + token=token_f, + keyspace="n", + ) == adbadmin_f.get_database(keyspace="n") assert adbadmin_0.get_database( - token=token_f, keyspace="n", region="r" - ) == adbadmin_f.get_database(keyspace="n", region="r") + token=token_f, + keyspace="n", + ) == adbadmin_f.get_database(keyspace="n") @pytest.mark.describe( "test of token inheritance in spawning from DataAPIDatabaseAdmin" @@ -436,31 +643,22 @@ def test_param_normalize_getdatabase(self) -> None: adm = AstraDBAdmin("t1") db_adm1 = adm.get_database_admin(db_id, region=db_reg) - db_adm2 = adm.get_database_admin(api_ep, region=db_reg) + with pytest.raises(ValueError): + adm.get_database_admin(api_ep, region=db_reg) db_adm3 = adm.get_database_admin(api_ep) with pytest.raises(ValueError): adm.get_database_admin(api_ep, region="not-that-one") - assert db_adm1 == db_adm2 - assert db_adm2 == db_adm3 + assert db_adm1 == db_adm3 db_1 = adm.get_database(db_id, region=db_reg, keyspace="the_ks") - db_2 = adm.get_database(api_ep, 
region=db_reg, keyspace="the_ks") + with pytest.raises(ValueError): + adm.get_database(api_ep, region=db_reg, keyspace="the_ks") db_3 = adm.get_database(api_ep, keyspace="the_ks") with pytest.raises(ValueError): adm.get_database(api_ep, region="not-that-one", keyspace="the_ks") - assert db_1 == db_2 - assert db_2 == db_3 - - db_adm_m1 = AstraDBDatabaseAdmin(db_id, token="t", region=db_reg) - db_adm_m2 = AstraDBDatabaseAdmin(api_ep, token="t", region=db_reg) - db_adm_m3 = AstraDBDatabaseAdmin(api_ep, token="t") - with pytest.raises(ValueError): - AstraDBDatabaseAdmin(api_ep, token="t", region="not-that-one") - - assert db_adm_m1 == db_adm_m2 - assert db_adm_m1 == db_adm_m3 + assert db_1 == db_3 @pytest.mark.describe( "test of region being deprecated in AstraDBDatabaseAdmin.get_database" diff --git a/tests/idiomatic/unit/test_apicommander.py b/tests/idiomatic/unit/test_apicommander.py index 11ced3fb..48dec7e3 100644 --- a/tests/idiomatic/unit/test_apicommander.py +++ b/tests/idiomatic/unit/test_apicommander.py @@ -78,7 +78,7 @@ def test_apicommander_request_sync(self, httpserver: HTTPServer) -> None: api_endpoint=base_endpoint, path=base_path, headers={"h": "v"}, - callers=[("cn", "cv")], + callers=[("cn0", "cv0"), ("cn1", "cv1")], ) def hv_matcher(hk: str, hv: str | None, ev: str) -> bool: @@ -94,7 +94,7 @@ def hv_matcher(hk: str, hv: str | None, ev: str) -> bool: method=HttpMethod.PUT, headers={ "h": "v", - "User-Agent": "cn/cv", + "User-Agent": "cn0/cv0 cn1/cv1", }, header_value_matcher=hv_matcher, data="{}", @@ -126,7 +126,7 @@ async def test_apicommander_request_async(self, httpserver: HTTPServer) -> None: api_endpoint=base_endpoint, path=base_path, headers={"h": "v"}, - callers=[("cn", "cv")], + callers=[("cn0", "cv0"), ("cn1", "cv1")], ) def hv_matcher(hk: str, hv: str | None, ev: str) -> bool: @@ -143,7 +143,7 @@ def hv_matcher(hk: str, hv: str | None, ev: str) -> bool: data="{}", headers={ "h": "v", - "User-Agent": "cn/cv", + "User-Agent": "cn0/cv0 cn1/cv1", 
}, header_value_matcher=hv_matcher, ).respond_with_json({"r": 1}) diff --git a/tests/idiomatic/unit/test_bulk_write_results.py b/tests/idiomatic/unit/test_bulk_write_results.py index 519973a2..697e5f03 100644 --- a/tests/idiomatic/unit/test_bulk_write_results.py +++ b/tests/idiomatic/unit/test_bulk_write_results.py @@ -63,33 +63,14 @@ def test_reduce_bulk_write_results(self) -> None: ) assert reduced_a == expected_a - bwr_n = BulkWriteResult( - bulk_api_results={}, - deleted_count=None, - inserted_count=0, - matched_count=0, - modified_count=0, - upserted_count=0, - upserted_ids={}, - ) - bwr_1 = BulkWriteResult( - bulk_api_results={}, - deleted_count=1, - inserted_count=1, - matched_count=1, - modified_count=1, - upserted_count=1, - upserted_ids={}, - ) - - reduced_n = reduce_bulk_write_results([bwr_1, bwr_n, bwr_1]) - expected_n = BulkWriteResult( - bulk_api_results={}, - deleted_count=None, - inserted_count=2, - matched_count=2, - modified_count=2, - upserted_count=2, - upserted_ids={}, + reduced_b = reduce_bulk_write_results([bwr1, bwr2, bwr1]) + expected_b = BulkWriteResult( + bulk_api_results={1: [{"seq1": 1}]}, + deleted_count=210, + inserted_count=420, + matched_count=630, + modified_count=840, + upserted_count=1050, + upserted_ids={1: {"useq1": 1}, 2: {"useq2": 2}}, ) - assert reduced_n == expected_n + assert reduced_b == expected_b diff --git a/tests/idiomatic/unit/test_collections_async.py b/tests/idiomatic/unit/test_collections_async.py index bbe15b26..43e406fd 100644 --- a/tests/idiomatic/unit/test_collections_async.py +++ b/tests/idiomatic/unit/test_collections_async.py @@ -34,14 +34,12 @@ async def test_instantiate_collection_async( col1 = AsyncCollection( async_database, "id_test_collection", - caller_name="c_n", - caller_version="c_v", + callers=[("cn", "cv")], ) col2 = AsyncCollection( async_database, "id_test_collection", - caller_name="c_n", - caller_version="c_v", + callers=[("cn", "cv")], ) assert col1 == col2 @@ -53,74 +51,179 @@ async def 
test_convert_collection_async( col1 = AsyncCollection( async_database, "id_test_collection", - caller_name="c_n", - caller_version="c_v", + callers=[("cn", "cv")], ) assert col1 == col1._copy() assert col1 == col1.with_options() assert col1 == col1.to_sync().to_async() + @async_fail_if_not_removed + @pytest.mark.describe( + "test of Collection set_caller and caller_name in rich _copy, async" + ) + async def test_deprecated_caller_in_rich_copy_collection_async( + self, + async_database: AsyncDatabase, + ) -> None: + with pytest.warns(DeprecationWarning): + col1 = AsyncCollection( + async_database, + "id_test_collection", + caller_name="c_n", + caller_version="c_v", + ) + assert col1 == AsyncCollection( + async_database, + "id_test_collection", + callers=[("c_n", "c_v")], + ) + with pytest.raises(ValueError, match="`caller_name` and `caller_version`"): + AsyncCollection( + async_database, + "id_test_collection", + callers=[("c_n", "c_v")], + caller_name="c_n", + ) + with pytest.raises(ValueError, match="`caller_name` and `caller_version`"): + AsyncCollection( + async_database, + "id_test_collection", + callers=[("c_n", "c_v")], + caller_version="c_v", + ) + with pytest.warns(DeprecationWarning): + assert col1 != col1._copy(caller_name="o", caller_version="o") + with pytest.warns(DeprecationWarning): + assert col1 != col1._copy(caller_name="o") + with pytest.warns(DeprecationWarning): + assert col1 != col1._copy(caller_version="o") + + with pytest.warns(DeprecationWarning): + col2 = col1._copy( + database=async_database._copy(token="x_t"), + name="other_name", + keyspace="other_keyspace", + caller_name="x_n", + caller_version="x_v", + ) + assert col2 != col1 + + with pytest.warns(DeprecationWarning): + col2.set_caller( + caller_name="c_n", + caller_version="c_v", + ) + col3 = col2._copy( + database=async_database, + name="id_test_collection", + keyspace=async_database.keyspace, + ) + assert col3 == col1 + + with pytest.warns(DeprecationWarning): + assert 
col1.with_options(caller_name="x", caller_version="x") != col1 + with pytest.warns(DeprecationWarning): + assert col1.with_options(caller_name="x") != col1 + with pytest.warns(DeprecationWarning): + assert col1.with_options(caller_version="x") != col1 + + with pytest.warns(DeprecationWarning): + assert ( + col1.with_options(caller_name="x", caller_version="x").with_options( + caller_name="c_n", caller_version="c_v" + ) + == col1 + ) + @pytest.mark.describe("test of Collection rich _copy, async") async def test_rich_copy_collection_async( self, async_database: AsyncDatabase, ) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] col1 = AsyncCollection( async_database, "id_test_collection", - caller_name="c_n", - caller_version="c_v", + callers=callers0, ) assert col1 != col1._copy(database=async_database._copy(token="x_t")) assert col1 != col1._copy(name="o") assert col1 != col1._copy(keyspace="o") - assert col1 != col1._copy(caller_name="o") - assert col1 != col1._copy(caller_version="o") + assert col1 != col1._copy(callers=callers1) col2 = col1._copy( database=async_database._copy(token="x_t"), name="other_name", keyspace="other_keyspace", - caller_name="x_n", - caller_version="x_v", + callers=callers1, ) assert col2 != col1 - col2.set_caller( - caller_name="c_n", - caller_version="c_v", - ) - col3 = col2._copy( - database=async_database, - name="id_test_collection", - keyspace=async_database.keyspace, - ) - assert col3 == col1 - assert col1.with_options(name="x") != col1 + assert col1.with_options(callers=callers1) != col1 + assert ( col1.with_options(name="x").with_options(name="id_test_collection") == col1 ) - assert col1.with_options(caller_name="x") != col1 assert ( - col1.with_options(caller_name="x").with_options(caller_name="c_n") == col1 + col1.with_options(callers=callers1).with_options(callers=callers0) == col1 ) - assert col1.with_options(caller_version="x") != col1 - assert ( - 
col1.with_options(caller_version="x").with_options(caller_version="c_v") - == col1 + + @async_fail_if_not_removed + @pytest.mark.describe( + "test of set_caller and caller_name in Collection rich conversions, async" + ) + async def test_deprecated_caller_in_rich_convert_collection_async( + self, + async_database: AsyncDatabase, + ) -> None: + with pytest.warns(DeprecationWarning): + col1 = AsyncCollection( + async_database, + "id_test_collection", + caller_name="c_n", + caller_version="c_v", + ) + with pytest.warns(DeprecationWarning): + assert col1 != col1.to_sync(caller_name="o").to_async() + with pytest.warns(DeprecationWarning): + assert col1 != col1.to_sync(caller_version="o").to_async() + + with pytest.warns(DeprecationWarning): + col2s = col1.to_sync( + database=async_database._copy(token="x_t").to_sync(), + name="other_name", + keyspace="other_keyspace", + caller_name="x_n", + caller_version="x_v", + ) + assert col2s.to_async() != col1 + + with pytest.warns(DeprecationWarning): + col2s.set_caller( + caller_name="c_n", + caller_version="c_v", + ) + col3 = col2s.to_async( + database=async_database, + name="id_test_collection", + keyspace=async_database.keyspace, ) + assert col3 == col1 @pytest.mark.describe("test of Collection rich conversions, async") async def test_rich_convert_collection_async( self, async_database: AsyncDatabase, ) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] col1 = AsyncCollection( async_database, "id_test_collection", - caller_name="c_n", - caller_version="c_v", + keyspace="the_ks", + callers=callers0, ) assert ( col1 @@ -130,26 +233,21 @@ async def test_rich_convert_collection_async( ) assert col1 != col1.to_sync(name="o").to_async() assert col1 != col1.to_sync(keyspace="o").to_async() - assert col1 != col1.to_sync(caller_name="o").to_async() - assert col1 != col1.to_sync(caller_version="o").to_async() + assert col1 != col1.to_sync(callers=callers1).to_async() col2s = col1.to_sync( 
database=async_database._copy(token="x_t").to_sync(), name="other_name", keyspace="other_keyspace", - caller_name="x_n", - caller_version="x_v", + callers=callers1, ) assert col2s.to_async() != col1 - col2s.set_caller( - caller_name="c_n", - caller_version="c_v", - ) col3 = col2s.to_async( - database=async_database, + database=async_database._copy(), name="id_test_collection", - keyspace=async_database.keyspace, + keyspace="the_ks", + callers=callers0, ) assert col3 == col1 @@ -172,6 +270,7 @@ async def test_collection_name_property_async( col1 = AsyncCollection(db1, "coll") assert col1.name == "coll" + @async_fail_if_not_removed @pytest.mark.describe("test of Collection set_caller, async") async def test_collection_set_caller_async( self, @@ -195,6 +294,7 @@ async def test_collection_set_caller_async( ) assert col1 == col2 + @async_fail_if_not_removed @pytest.mark.describe("test collection conversions with caller mutableness, async") async def test_collection_conversions_caller_mutableness_async( self, diff --git a/tests/idiomatic/unit/test_collections_sync.py b/tests/idiomatic/unit/test_collections_sync.py index 1a2d1527..568236f3 100644 --- a/tests/idiomatic/unit/test_collections_sync.py +++ b/tests/idiomatic/unit/test_collections_sync.py @@ -34,14 +34,12 @@ def test_instantiate_collection_sync( col1 = Collection( sync_database, "id_test_collection", - caller_name="c_n", - caller_version="c_v", + callers=[("cn", "cv")], ) col2 = Collection( sync_database, "id_test_collection", - caller_name="c_n", - caller_version="c_v", + callers=[("cn", "cv")], ) assert col1 == col2 @@ -50,77 +48,178 @@ def test_convert_collection_sync( self, sync_database: Database, ) -> None: - col1 = Collection( - sync_database, - "id_test_collection", - caller_name="c_n", - caller_version="c_v", - ) + col1 = Collection(sync_database, "id_test_collection", callers=[("cn", "cv")]) assert col1 == col1._copy() assert col1 == col1.with_options() assert col1 == col1.to_async().to_sync() + 
@sync_fail_if_not_removed + @pytest.mark.describe( + "test of Collection set_caller and caller_name in rich _copy, sync" + ) + def test_deprecated_caller_in_rich_copy_collection_sync( + self, + sync_database: Database, + ) -> None: + with pytest.warns(DeprecationWarning): + col1 = Collection( + sync_database, + "id_test_collection", + caller_name="c_n", + caller_version="c_v", + ) + assert col1 == Collection( + sync_database, + "id_test_collection", + callers=[("c_n", "c_v")], + ) + with pytest.raises(ValueError, match="`caller_name` and `caller_version`"): + Collection( + sync_database, + "id_test_collection", + callers=[("c_n", "c_v")], + caller_name="c_n", + ) + with pytest.raises(ValueError, match="`caller_name` and `caller_version`"): + Collection( + sync_database, + "id_test_collection", + callers=[("c_n", "c_v")], + caller_version="c_v", + ) + with pytest.warns(DeprecationWarning): + assert col1 != col1._copy(caller_name="o", caller_version="o") + with pytest.warns(DeprecationWarning): + assert col1 != col1._copy(caller_name="o") + with pytest.warns(DeprecationWarning): + assert col1 != col1._copy(caller_version="o") + + with pytest.warns(DeprecationWarning): + col2 = col1._copy( + database=sync_database._copy(token="x_t"), + name="other_name", + keyspace="other_keyspace", + caller_name="x_n", + caller_version="x_v", + ) + assert col2 != col1 + + with pytest.warns(DeprecationWarning): + col2.set_caller( + caller_name="c_n", + caller_version="c_v", + ) + col3 = col2._copy( + database=sync_database, + name="id_test_collection", + keyspace=sync_database.keyspace, + ) + assert col3 == col1 + + with pytest.warns(DeprecationWarning): + assert col1.with_options(caller_name="x", caller_version="x") != col1 + with pytest.warns(DeprecationWarning): + assert col1.with_options(caller_name="x") != col1 + with pytest.warns(DeprecationWarning): + assert col1.with_options(caller_version="x") != col1 + + with pytest.warns(DeprecationWarning): + assert ( + 
col1.with_options(caller_name="x", caller_version="x").with_options( + caller_name="c_n", caller_version="c_v" + ) + == col1 + ) + @pytest.mark.describe("test of Collection rich _copy, sync") def test_rich_copy_collection_sync( self, sync_database: Database, ) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] col1 = Collection( sync_database, "id_test_collection", - caller_name="c_n", - caller_version="c_v", + callers=callers0, ) assert col1 != col1._copy(database=sync_database._copy(token="x_t")) assert col1 != col1._copy(name="o") assert col1 != col1._copy(keyspace="o") - assert col1 != col1._copy(caller_name="o") - assert col1 != col1._copy(caller_version="o") + assert col1 != col1._copy(callers=callers1) col2 = col1._copy( database=sync_database._copy(token="x_t"), name="other_name", keyspace="other_keyspace", - caller_name="x_n", - caller_version="x_v", + callers=callers1, ) assert col2 != col1 - col2.set_caller( - caller_name="c_n", - caller_version="c_v", - ) - col3 = col2._copy( - database=sync_database, - name="id_test_collection", - keyspace=sync_database.keyspace, - ) - assert col3 == col1 - assert col1.with_options(name="x") != col1 + assert col1.with_options(callers=callers1) != col1 + assert ( col1.with_options(name="x").with_options(name="id_test_collection") == col1 ) - assert col1.with_options(caller_name="x") != col1 assert ( - col1.with_options(caller_name="x").with_options(caller_name="c_n") == col1 + col1.with_options(callers=callers1).with_options(callers=callers0) == col1 ) - assert col1.with_options(caller_version="x") != col1 - assert ( - col1.with_options(caller_version="x").with_options(caller_version="c_v") - == col1 + + @sync_fail_if_not_removed + @pytest.mark.describe( + "test of set_caller and caller_name in Collection rich conversions, sync" + ) + def test_deprecated_caller_in_rich_convert_collection_sync( + self, + sync_database: Database, + ) -> None: + with pytest.warns(DeprecationWarning): + col1 = 
Collection( + sync_database, + "id_test_collection", + caller_name="c_n", + caller_version="c_v", + ) + with pytest.warns(DeprecationWarning): + assert col1 != col1.to_async(caller_name="o").to_sync() + with pytest.warns(DeprecationWarning): + assert col1 != col1.to_async(caller_version="o").to_sync() + + with pytest.warns(DeprecationWarning): + col2a = col1.to_async( + database=sync_database._copy(token="x_t").to_async(), + name="other_name", + keyspace="other_keyspace", + caller_name="x_n", + caller_version="x_v", + ) + assert col2a.to_sync() != col1 + + with pytest.warns(DeprecationWarning): + col2a.set_caller( + caller_name="c_n", + caller_version="c_v", + ) + col3 = col2a.to_sync( + database=sync_database, + name="id_test_collection", + keyspace=sync_database.keyspace, ) + assert col3 == col1 @pytest.mark.describe("test of Collection rich conversions, sync") def test_rich_convert_collection_sync( self, sync_database: Database, ) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] col1 = Collection( sync_database, "id_test_collection", - caller_name="c_n", - caller_version="c_v", + keyspace="the_ks", + callers=callers0, ) assert ( col1 @@ -130,26 +229,21 @@ def test_rich_convert_collection_sync( ) assert col1 != col1.to_async(name="o").to_sync() assert col1 != col1.to_async(keyspace="o").to_sync() - assert col1 != col1.to_async(caller_name="o").to_sync() - assert col1 != col1.to_async(caller_version="o").to_sync() + assert col1 != col1.to_async(callers=callers1).to_sync() col2a = col1.to_async( database=sync_database._copy(token="x_t").to_async(), name="other_name", keyspace="other_keyspace", - caller_name="x_n", - caller_version="x_v", + callers=callers1, ) assert col2a.to_sync() != col1 - col2a.set_caller( - caller_name="c_n", - caller_version="c_v", - ) col3 = col2a.to_sync( - database=sync_database, + database=sync_database._copy(), name="id_test_collection", - keyspace=sync_database.keyspace, + keyspace="the_ks", + 
callers=callers0, ) assert col3 == col1 @@ -172,6 +266,7 @@ def test_collection_name_property_sync( col1 = Collection(db1, "coll") assert col1.name == "coll" + @sync_fail_if_not_removed @pytest.mark.describe("test of Collection set_caller, sync") def test_collection_set_caller_sync( self, @@ -195,6 +290,7 @@ def test_collection_set_caller_sync( ) assert col1 == col2 + @sync_fail_if_not_removed @pytest.mark.describe("test collection conversions with caller mutableness, sync") def test_collection_conversions_caller_mutableness_sync( self, diff --git a/tests/idiomatic/unit/test_databases_async.py b/tests/idiomatic/unit/test_databases_async.py index 3a6e2d8c..e36a9c6a 100644 --- a/tests/idiomatic/unit/test_databases_async.py +++ b/tests/idiomatic/unit/test_databases_async.py @@ -24,9 +24,14 @@ from ..conftest import ( TEST_COLLECTION_INSTANCE_NAME, DataAPICredentials, + async_fail_if_not_removed, sync_fail_if_not_removed, ) +api_ep5643_prod = ( + "https://56439999-89ab-cdef-0123-456789abcdef-region.apps.astra.datastax.com" +) + class TestDatabasesAsync: @pytest.mark.describe("test of instantiating Database, async") @@ -35,13 +40,11 @@ async def test_instantiate_database_async( data_api_credentials_kwargs: DataAPICredentials, ) -> None: db1 = AsyncDatabase( - caller_name="c_n", - caller_version="c_v", + callers=[("c_n", "c_v")], **data_api_credentials_kwargs, ) db2 = AsyncDatabase( - caller_name="c_n", - caller_version="c_v", + callers=[("c_n", "c_v")], **data_api_credentials_kwargs, ) assert db1 == db2 @@ -52,32 +55,116 @@ async def test_convert_database_async( data_api_credentials_kwargs: DataAPICredentials, ) -> None: db1 = AsyncDatabase( - caller_name="c_n", - caller_version="c_v", + callers=[("c_n", "c_v")], **data_api_credentials_kwargs, ) assert db1 == db1._copy() assert db1 == db1.with_options() assert db1 == db1.to_sync().to_async() + @async_fail_if_not_removed + @pytest.mark.describe("test of caller deprecation in Database rich _copy, async") + async def 
test_caller_deprecation_in_rich_copy_database_async( + self, + ) -> None: + with pytest.warns(DeprecationWarning): + db1 = AsyncDatabase( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + caller_name="c_n", + caller_version="c_v", + api_path="api_path", + api_version="api_version", + ) + assert db1 == AsyncDatabase( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + callers=[("c_n", "c_v")], + api_path="api_path", + api_version="api_version", + ) + with pytest.raises(ValueError, match="`caller_name` and `caller_version`"): + assert db1 == AsyncDatabase( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + callers=[("c_n", "c_v")], + caller_name="c_n", + ) + with pytest.raises(ValueError, match="`caller_name` and `caller_version`"): + assert db1 == AsyncDatabase( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + callers=[("c_n", "c_v")], + caller_version="c_v", + ) + with pytest.warns(DeprecationWarning): + assert db1 != db1._copy(caller_name="x", caller_version="x") + with pytest.warns(DeprecationWarning): + assert db1 != db1._copy(caller_name="x") + with pytest.warns(DeprecationWarning): + assert db1 != db1._copy(caller_version="x") + + with pytest.warns(DeprecationWarning): + db2 = db1._copy( + api_endpoint="x", + token="x", + keyspace="x", + caller_name="x_n", + caller_version="x_v", + api_path="x", + api_version="x", + ) + assert db2 != db1 + + with pytest.warns(DeprecationWarning): + db2.set_caller( + caller_name="c_n", + caller_version="c_v", + ) + db3 = db2._copy( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + api_path="api_path", + api_version="api_version", + ) + assert db3 == db1 + + with pytest.warns(DeprecationWarning): + assert db1.with_options(caller_name="x") != db1 + with pytest.warns(DeprecationWarning): + assert db1.with_options(caller_version="x") != db1 + + with pytest.warns(DeprecationWarning): + assert ( + 
db1.with_options(caller_name="x", caller_version="x").with_options( + caller_name="c_n", caller_version="c_v" + ) + == db1 + ) + @pytest.mark.describe("test of Database rich _copy, async") async def test_rich_copy_database_async( self, ) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] db1 = AsyncDatabase( api_endpoint="api_endpoint", token="token", keyspace="keyspace", - caller_name="c_n", - caller_version="c_v", + callers=callers0, api_path="api_path", api_version="api_version", ) assert db1 != db1._copy(api_endpoint="x") assert db1 != db1._copy(token="x") assert db1 != db1._copy(keyspace="x") - assert db1 != db1._copy(caller_name="x") - assert db1 != db1._copy(caller_version="x") + assert db1 != db1._copy(callers=callers1) assert db1 != db1._copy(api_path="x") assert db1 != db1._copy(api_version="x") @@ -85,18 +172,58 @@ async def test_rich_copy_database_async( api_endpoint="x", token="x", keyspace="x", - caller_name="x_n", - caller_version="x_v", + callers=callers1, api_path="x", api_version="x", ) assert db2 != db1 - db2.set_caller( - caller_name="c_n", - caller_version="c_v", - ) - db3 = db2._copy( + assert db1.with_options(keyspace="x") != db1 + assert db1.with_options(callers=callers1) != db1 + + assert db1.with_options(keyspace="x").with_options(keyspace="keyspace") == db1 + assert db1.with_options(callers=callers1).with_options(callers=callers0) == db1 + + @async_fail_if_not_removed + @pytest.mark.describe( + "test of caller deprecation in Database rich conversions, async" + ) + async def test_caller_deprecation_in_rich_convert_database_async( + self, + ) -> None: + with pytest.warns(DeprecationWarning): + db1 = AsyncDatabase( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + caller_name="c_n", + caller_version="c_v", + api_path="api_path", + api_version="api_version", + ) + with pytest.warns(DeprecationWarning): + assert db1 != db1.to_sync(caller_name="o").to_async() + with 
pytest.warns(DeprecationWarning): + assert db1 != db1.to_sync(caller_version="o").to_async() + + with pytest.warns(DeprecationWarning): + db2s = db1.to_sync( + api_endpoint="x", + token="x", + keyspace="x", + caller_name="x_n", + caller_version="x_v", + api_path="x", + api_version="x", + ) + assert db2s.to_async() != db1 + + with pytest.warns(DeprecationWarning): + db2s.set_caller( + caller_name="c_n", + caller_version="c_v", + ) + db3 = db2s.to_async( api_endpoint="api_endpoint", token="token", keyspace="keyspace", @@ -105,34 +232,24 @@ async def test_rich_copy_database_async( ) assert db3 == db1 - assert db1.with_options(keyspace="x") != db1 - assert db1.with_options(keyspace="x").with_options(keyspace="keyspace") == db1 - assert db1.with_options(caller_name="x") != db1 - assert db1.with_options(caller_name="x").with_options(caller_name="c_n") == db1 - assert db1.with_options(caller_version="x") != db1 - assert ( - db1.with_options(caller_version="x").with_options(caller_version="c_v") - == db1 - ) - @pytest.mark.describe("test of Database rich conversions, async") async def test_rich_convert_database_async( self, ) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] db1 = AsyncDatabase( api_endpoint="api_endpoint", token="token", keyspace="keyspace", - caller_name="c_n", - caller_version="c_v", + callers=callers0, api_path="api_path", api_version="api_version", ) assert db1 != db1.to_sync(api_endpoint="o").to_async() assert db1 != db1.to_sync(token="o").to_async() assert db1 != db1.to_sync(keyspace="o").to_async() - assert db1 != db1.to_sync(caller_name="o").to_async() - assert db1 != db1.to_sync(caller_version="o").to_async() + assert db1 != db1.to_sync(callers=callers1).to_async() assert db1 != db1.to_sync(api_path="o").to_async() assert db1 != db1.to_sync(api_version="o").to_async() @@ -140,41 +257,40 @@ async def test_rich_convert_database_async( api_endpoint="x", token="x", keyspace="x", - caller_name="x_n", - caller_version="x_v", 
+ callers=callers1, api_path="x", api_version="x", ) assert db2s.to_async() != db1 - db2s.set_caller( - caller_name="c_n", - caller_version="c_v", - ) db3 = db2s.to_async( api_endpoint="api_endpoint", token="token", keyspace="keyspace", + callers=callers0, api_path="api_path", api_version="api_version", ) assert db3 == db1 + @async_fail_if_not_removed @pytest.mark.describe("test of Database set_caller, async") async def test_database_set_caller_async( self, data_api_credentials_kwargs: DataAPICredentials, ) -> None: - db1 = AsyncDatabase( - caller_name="c_n1", - caller_version="c_v1", - **data_api_credentials_kwargs, - ) - db2 = AsyncDatabase( - caller_name="c_n2", - caller_version="c_v2", - **data_api_credentials_kwargs, - ) + with pytest.warns(DeprecationWarning): + db1 = AsyncDatabase( + caller_name="c_n1", + caller_version="c_v1", + **data_api_credentials_kwargs, + ) + with pytest.warns(DeprecationWarning): + db2 = AsyncDatabase( + caller_name="c_n2", + caller_version="c_v2", + **data_api_credentials_kwargs, + ) db2.set_caller( caller_name="c_n1", caller_version="c_v1", @@ -202,25 +318,28 @@ async def test_database_get_collection_async( ) assert collection_ks2.database.keyspace == NAMESPACE_2 + @async_fail_if_not_removed @pytest.mark.describe("test database conversions with caller mutableness, async") async def test_database_conversions_caller_mutableness_async( self, data_api_credentials_kwargs: DataAPICredentials, ) -> None: - db1 = AsyncDatabase( - caller_name="c_n1", - caller_version="c_v1", - **data_api_credentials_kwargs, - ) + with pytest.warns(DeprecationWarning): + db1 = AsyncDatabase( + caller_name="c_n1", + caller_version="c_v1", + **data_api_credentials_kwargs, + ) db1.set_caller( caller_name="c_n2", caller_version="c_v2", ) - db2 = AsyncDatabase( - caller_name="c_n2", - caller_version="c_v2", - **data_api_credentials_kwargs, - ) + with pytest.warns(DeprecationWarning): + db2 = AsyncDatabase( + caller_name="c_n2", + caller_version="c_v2", + 
**data_api_credentials_kwargs, + ) assert db1.to_sync().to_async() == db2 assert db1._copy() == db2 @@ -261,10 +380,14 @@ async def test_database_from_client_default_keyspace_per_environment_async( self, ) -> None: client_a = DataAPIClient(environment=Environment.PROD) - db_a_m = client_a.get_async_database("ep", region="r", keyspace="M") + db_a_m = client_a.get_async_database(id="id", region="r", keyspace="M") assert db_a_m.keyspace == "M" - db_a_n = client_a.get_async_database("ep", region="r") + db_a_n = client_a.get_async_database(id="id", region="r") assert db_a_n.keyspace == DEFAULT_ASTRA_DB_KEYSPACE + db_a_me = client_a.get_async_database(api_ep5643_prod, keyspace="M") + assert db_a_me.keyspace == "M" + db_a_ne = client_a.get_async_database(api_ep5643_prod) + assert db_a_ne.keyspace == DEFAULT_ASTRA_DB_KEYSPACE client_o = DataAPIClient(environment=Environment.OTHER) db_a_m = client_o.get_async_database("http://a", keyspace="M") diff --git a/tests/idiomatic/unit/test_databases_sync.py b/tests/idiomatic/unit/test_databases_sync.py index 9adce4fe..a180da39 100644 --- a/tests/idiomatic/unit/test_databases_sync.py +++ b/tests/idiomatic/unit/test_databases_sync.py @@ -27,6 +27,10 @@ sync_fail_if_not_removed, ) +api_ep5643_prod = ( + "https://56439999-89ab-cdef-0123-456789abcdef-region.apps.astra.datastax.com" +) + class TestDatabasesSync: @pytest.mark.describe("test of instantiating Database, sync") @@ -35,13 +39,11 @@ def test_instantiate_database_sync( data_api_credentials_kwargs: DataAPICredentials, ) -> None: db1 = Database( - caller_name="c_n", - caller_version="c_v", + callers=[("c_n", "c_v")], **data_api_credentials_kwargs, ) db2 = Database( - caller_name="c_n", - caller_version="c_v", + callers=[("c_n", "c_v")], **data_api_credentials_kwargs, ) assert db1 == db2 @@ -52,32 +54,118 @@ def test_convert_database_sync( data_api_credentials_kwargs: DataAPICredentials, ) -> None: db1 = Database( - caller_name="c_n", - caller_version="c_v", + callers=[("c_n", 
"c_v")], **data_api_credentials_kwargs, ) assert db1 == db1._copy() assert db1 == db1.with_options() assert db1 == db1.to_async().to_sync() + @sync_fail_if_not_removed + @pytest.mark.describe("test of caller deprecation in Database rich _copy, sync") + def test_caller_deprecation_in_rich_copy_database_sync( + self, + ) -> None: + with pytest.warns(DeprecationWarning): + db1 = Database( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + caller_name="c_n", + caller_version="c_v", + api_path="api_path", + api_version="api_version", + ) + assert db1 == Database( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + callers=[("c_n", "c_v")], + api_path="api_path", + api_version="api_version", + ) + with pytest.raises(ValueError, match="`caller_name` and `caller_version`"): + assert db1 == Database( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + callers=[("c_n", "c_v")], + caller_name="c_n", + ) + with pytest.raises(ValueError, match="`caller_name` and `caller_version`"): + assert db1 == Database( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + callers=[("c_n", "c_v")], + caller_version="c_v", + ) + with pytest.warns(DeprecationWarning): + assert db1 != db1._copy(caller_name="x", caller_version="x") + with pytest.warns(DeprecationWarning): + assert db1 != db1._copy(caller_name="x") + with pytest.warns(DeprecationWarning): + assert db1 != db1._copy(caller_version="x") + + with pytest.warns(DeprecationWarning): + db2 = db1._copy( + api_endpoint="x", + token="x", + keyspace="x", + caller_name="x_n", + caller_version="x_v", + api_path="x", + api_version="x", + ) + assert db2 != db1 + + with pytest.warns(DeprecationWarning): + db2.set_caller( + caller_name="c_n", + caller_version="c_v", + ) + db3 = db2._copy( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + api_path="api_path", + api_version="api_version", + ) + assert db3 == db1 + + with 
pytest.warns(DeprecationWarning): + assert db1.with_options(caller_name="x", caller_version="x") != db1 + with pytest.warns(DeprecationWarning): + assert db1.with_options(caller_name="x") != db1 + with pytest.warns(DeprecationWarning): + assert db1.with_options(caller_version="x") != db1 + + with pytest.warns(DeprecationWarning): + assert ( + db1.with_options(caller_name="x", caller_version="x").with_options( + caller_name="c_n", caller_version="c_v" + ) + == db1 + ) + @pytest.mark.describe("test of Database rich _copy, sync") def test_rich_copy_database_sync( self, ) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] db1 = Database( api_endpoint="api_endpoint", token="token", keyspace="keyspace", - caller_name="c_n", - caller_version="c_v", + callers=callers0, api_path="api_path", api_version="api_version", ) assert db1 != db1._copy(api_endpoint="x") assert db1 != db1._copy(token="x") assert db1 != db1._copy(keyspace="x") - assert db1 != db1._copy(caller_name="x") - assert db1 != db1._copy(caller_version="x") + assert db1 != db1._copy(callers=callers1) assert db1 != db1._copy(api_path="x") assert db1 != db1._copy(api_version="x") @@ -85,18 +173,58 @@ def test_rich_copy_database_sync( api_endpoint="x", token="x", keyspace="x", - caller_name="x_n", - caller_version="x_v", + callers=callers1, api_path="x", api_version="x", ) assert db2 != db1 - db2.set_caller( - caller_name="c_n", - caller_version="c_v", - ) - db3 = db2._copy( + assert db1.with_options(keyspace="x") != db1 + assert db1.with_options(callers=callers1) != db1 + + assert db1.with_options(keyspace="x").with_options(keyspace="keyspace") == db1 + assert db1.with_options(callers=callers1).with_options(callers=callers0) == db1 + + @sync_fail_if_not_removed + @pytest.mark.describe( + "test of caller deprecation in Database rich conversions, sync" + ) + def test_caller_deprecation_in_rich_convert_database_sync( + self, + ) -> None: + with pytest.warns(DeprecationWarning): + db1 = 
Database( + api_endpoint="api_endpoint", + token="token", + keyspace="keyspace", + caller_name="c_n", + caller_version="c_v", + api_path="api_path", + api_version="api_version", + ) + with pytest.warns(DeprecationWarning): + assert db1 != db1.to_async(caller_name="o").to_sync() + with pytest.warns(DeprecationWarning): + assert db1 != db1.to_async(caller_version="o").to_sync() + + with pytest.warns(DeprecationWarning): + db2a = db1.to_async( + api_endpoint="x", + token="x", + keyspace="x", + caller_name="x_n", + caller_version="x_v", + api_path="x", + api_version="x", + ) + assert db2a.to_sync() != db1 + + with pytest.warns(DeprecationWarning): + db2a.set_caller( + caller_name="c_n", + caller_version="c_v", + ) + db3 = db2a.to_sync( api_endpoint="api_endpoint", token="token", keyspace="keyspace", @@ -105,34 +233,24 @@ def test_rich_copy_database_sync( ) assert db3 == db1 - assert db1.with_options(keyspace="x") != db1 - assert db1.with_options(keyspace="x").with_options(keyspace="keyspace") == db1 - assert db1.with_options(caller_name="x") != db1 - assert db1.with_options(caller_name="x").with_options(caller_name="c_n") == db1 - assert db1.with_options(caller_version="x") != db1 - assert ( - db1.with_options(caller_version="x").with_options(caller_version="c_v") - == db1 - ) - @pytest.mark.describe("test of Database rich conversions, sync") def test_rich_convert_database_sync( self, ) -> None: + callers0 = [("cn", "cv"), ("dn", "dv")] + callers1 = [("x", "y")] db1 = Database( api_endpoint="api_endpoint", token="token", keyspace="keyspace", - caller_name="c_n", - caller_version="c_v", + callers=callers0, api_path="api_path", api_version="api_version", ) assert db1 != db1.to_async(api_endpoint="o").to_sync() assert db1 != db1.to_async(token="o").to_sync() assert db1 != db1.to_async(keyspace="o").to_sync() - assert db1 != db1.to_async(caller_name="o").to_sync() - assert db1 != db1.to_async(caller_version="o").to_sync() + assert db1 != 
db1.to_async(callers=callers1).to_sync() assert db1 != db1.to_async(api_path="o").to_sync() assert db1 != db1.to_async(api_version="o").to_sync() @@ -140,41 +258,40 @@ def test_rich_convert_database_sync( api_endpoint="x", token="x", keyspace="x", - caller_name="x_n", - caller_version="x_v", + callers=callers1, api_path="x", api_version="x", ) assert db2a.to_sync() != db1 - db2a.set_caller( - caller_name="c_n", - caller_version="c_v", - ) db3 = db2a.to_sync( api_endpoint="api_endpoint", token="token", keyspace="keyspace", + callers=callers0, api_path="api_path", api_version="api_version", ) assert db3 == db1 + @sync_fail_if_not_removed @pytest.mark.describe("test of Database set_caller, sync") def test_database_set_caller_sync( self, data_api_credentials_kwargs: DataAPICredentials, ) -> None: - db1 = Database( - caller_name="c_n1", - caller_version="c_v1", - **data_api_credentials_kwargs, - ) - db2 = Database( - caller_name="c_n2", - caller_version="c_v2", - **data_api_credentials_kwargs, - ) + with pytest.warns(DeprecationWarning): + db1 = Database( + caller_name="c_n1", + caller_version="c_v1", + **data_api_credentials_kwargs, + ) + with pytest.warns(DeprecationWarning): + db2 = Database( + caller_name="c_n2", + caller_version="c_v2", + **data_api_credentials_kwargs, + ) db2.set_caller( caller_name="c_n1", caller_version="c_v1", @@ -203,25 +320,28 @@ def test_database_get_collection_sync( ) assert collection_ks2.database.keyspace == NAMESPACE_2 + @sync_fail_if_not_removed @pytest.mark.describe("test database conversions with caller mutableness, sync") def test_database_conversions_caller_mutableness_sync( self, data_api_credentials_kwargs: DataAPICredentials, ) -> None: - db1 = Database( - caller_name="c_n1", - caller_version="c_v1", - **data_api_credentials_kwargs, - ) + with pytest.warns(DeprecationWarning): + db1 = Database( + caller_name="c_n1", + caller_version="c_v1", + **data_api_credentials_kwargs, + ) db1.set_caller( caller_name="c_n2", 
caller_version="c_v2", ) - db2 = Database( - caller_name="c_n2", - caller_version="c_v2", - **data_api_credentials_kwargs, - ) + with pytest.warns(DeprecationWarning): + db2 = Database( + caller_name="c_n2", + caller_version="c_v2", + **data_api_credentials_kwargs, + ) assert db1.to_async().to_sync() == db2 assert db1._copy() == db2 @@ -256,10 +376,14 @@ def test_database_default_keyspace_per_environment_sync(self) -> None: ) def test_database_from_client_default_keyspace_per_environment_sync(self) -> None: client_a = DataAPIClient(environment=Environment.PROD) - db_a_m = client_a.get_database("id", region="r", keyspace="M") + db_a_m = client_a.get_database(id="id", region="r", keyspace="M") assert db_a_m.keyspace == "M" - db_a_n = client_a.get_database("id", region="r") + db_a_n = client_a.get_database(id="id", region="r") assert db_a_n.keyspace == DEFAULT_ASTRA_DB_KEYSPACE + db_a_me = client_a.get_database(api_ep5643_prod, keyspace="M") + assert db_a_me.keyspace == "M" + db_a_ne = client_a.get_database(api_ep5643_prod) + assert db_a_ne.keyspace == DEFAULT_ASTRA_DB_KEYSPACE client_o = DataAPIClient(environment=Environment.OTHER) db_a_m = client_o.get_database("http://a", keyspace="M")