diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 45ac67ff02..c52fcddf73 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -3,7 +3,7 @@ "name": "Dev Container for Cognite Python SDK", // Python base image reference: https://github.com/devcontainers/images/tree/main/src/python - "image": "mcr.microsoft.com/devcontainers/python:3.8-bullseye", + "image": "mcr.microsoft.com/devcontainers/python:3.10-bullseye", // Features to add to the dev container. More info: https://containers.dev/features "features": { @@ -23,9 +23,8 @@ "streetsidesoftware.code-spell-checker" ], "settings": { - // This is the path to the Poetry enabled Python environment, seen in the "Executable" setting by running "poetry env info" - "python.defaultInterpreterPath": "~/.cache/pypoetry/virtualenvs/cognite-sdk-gQQjLrWz-py3.8/bin/python" + "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python" } } } -} \ No newline at end of file +} diff --git a/.devcontainer/postCreateCommand.sh b/.devcontainer/postCreateCommand.sh index e76b3d7eff..27641723bf 100755 --- a/.devcontainer/postCreateCommand.sh +++ b/.devcontainer/postCreateCommand.sh @@ -1,12 +1,13 @@ #!/usr/bin/env bash # Copy in default VSCode settings file as part of initial devcontainer create process. -# This instead of a checked-in .vscode/settings.json file, to not overwrite user provided settings for normal local dev setups. +# Copying, instead of checking in .vscode/settings.json, avoids overwriting user-provided settings for normal local dev setups. mkdir -p .vscode cp .devcontainer/vscode.default.settings.json .vscode/settings.json -# Install all dependencies with Poetry -poetry install -E all +# Configure Poetry to create the virtual environment inside the project directory (matches the interpreter path in devcontainer.json) +poetry config virtualenvs.in-project true -# Install pre-commit hook +poetry env use python3.10 +poetry install -E all poetry run pre-commit install diff --git a/.github/actions/setup/action.yml b/.github/actions/setup/action.yml index 5ddbe06481..d25908d7bb 100644 --- a/.github/actions/setup/action.yml +++ b/.github/actions/setup/action.yml @@ -4,7 +4,7 @@ inputs: python_version: description: 'Python version to set up' required: false - default: "3.8" + default: "3.10" extras: description: 'extra deps: poetry install -E whatever' required: false diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 26e3d575a5..701f808830 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -46,7 +46,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, windows-latest] - python-version: ["3.8", "3.10", "3.11", "3.12"] # TODO: 3.9, 3.10 (requires a lot of work for FakeCogResGen for tests) + python-version: ["3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 - uses: ./.github/actions/setup @@ -68,6 +68,6 @@ jobs: run: pytest --durations=10 --cov --cov-report term --cov-report xml:coverage.xml -n8 --dist loadscope --reruns 2 --maxfail 20 - uses: codecov/codecov-action@v4 - if: matrix.os == 'windows-latest' && matrix.python-version == '3.8' + if: matrix.os == 'windows-latest' && matrix.python-version == '3.10' with: token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/verify-jupyter.yml b/.github/workflows/verify-jupyter.yml index 190a56925b..f1bbda9ab6 100644 --- a/.github/workflows/verify-jupyter.yml +++ b/.github/workflows/verify-jupyter.yml @@ -12,8 +12,8 @@ jobs: - uses: actions/checkout@v4 - uses: 
actions/setup-python@v5 with: - python-version: "3.8" - cache: "pip" + python-version: "3.10" + cache: "pip" - name: Build package using poetry run: | pip install poetry diff --git a/.github/workflows/verify-streamlit.yml b/.github/workflows/verify-streamlit.yml index 4062c70dff..2e4e9723b1 100644 --- a/.github/workflows/verify-streamlit.yml +++ b/.github/workflows/verify-streamlit.yml @@ -12,8 +12,8 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: "3.8" - cache: "pip" + python-version: "3.10" + cache: "pip" - name: Build package using poetry run: | pip install poetry diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9949730470..10f4425a03 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,11 +8,11 @@ repos: - --fix - --exit-non-zero-on-fix - --line-length=120 - - --ignore=E731,E501,W605 + - --ignore=E731,E501,W605,UP038 # See https://beta.ruff.rs/docs/rules for an overview of ruff rules - --select=E,W,F,I,T,RUF,TID,UP - --fixable=E,W,F,I,T,RUF,TID,UP - - --target-version=py38 + - --target-version=py310 - --exclude=cognite/client/_proto - id: ruff-format args: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 09c9916857..9c7feb2259 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -9,7 +9,7 @@ git clone https://github.com/cognitedata/cognite-sdk-python.git cd cognite-sdk-python ``` -We use [poetry](https://pypi.org/project/poetry/) for dependency- and virtual environment management. Make sure you use python 3.8. +We use [poetry](https://pypi.org/project/poetry/) for dependency and virtual environment management. Make sure you use Python 3.10. Install dependencies and initialize a shell within the virtual environment, with these commands: diff --git a/cognite/client/_api/annotations.py b/cognite/client/_api/annotations.py index 9ca86b0c47..b16c2ca8cc 100644 --- a/cognite/client/_api/annotations.py +++ b/cognite/client/_api/annotations.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Sequence from copy import deepcopy -from typing import TYPE_CHECKING, Any, Literal, Sequence, cast, overload +from typing import TYPE_CHECKING, Any, Literal, cast, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -138,12 +139,7 @@ def update( Args: item (Annotation | AnnotationWrite | AnnotationUpdate | Sequence[Annotation | AnnotationWrite | AnnotationUpdate]): Annotation or list of annotations to update (or patch or list of patches to apply) - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (Annotation or -Write). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (Annotation or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. 
Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: Annotation | AnnotationList: No description.""" diff --git a/cognite/client/_api/assets.py b/cognite/client/_api/assets.py index 85f5a1e8ee..47c7b261b4 100644 --- a/cognite/client/_api/assets.py +++ b/cognite/client/_api/assets.py @@ -7,28 +7,20 @@ import operator as op import threading import warnings +from collections.abc import Callable, Iterable, Iterator, Sequence from functools import cached_property from types import MappingProxyType from typing import ( TYPE_CHECKING, Any, - Callable, - Dict, - Iterable, - Iterator, - List, Literal, NamedTuple, NoReturn, - Sequence, - Tuple, - Union, + TypeAlias, cast, overload, ) -from typing_extensions import TypeAlias - from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ from cognite.client.data_classes import ( @@ -73,13 +65,13 @@ AggregateAssetProperty: TypeAlias = Literal["child_count", "path", "depth"] -SortSpec: TypeAlias = Union[ - AssetSort, - str, - SortableAssetProperty, - Tuple[str, Literal["asc", "desc"]], - Tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]], -] +SortSpec: TypeAlias = ( + AssetSort + | str + | SortableAssetProperty + | tuple[str, Literal["asc", "desc"]] + | tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]] +) _FILTERS_SUPPORTED: frozenset[type[Filter]] = _BASIC_FILTERS | {filters.Search} @@ -610,7 +602,7 @@ def create_hierarchy( Args: assets (Sequence[Asset | AssetWrite] | AssetHierarchy): List of assets to create or an instance of AssetHierarchy. upsert (bool): If used, already existing assets will be updated instead of an exception being raised. You may control how updates are applied with the 'upsert_mode' argument. - upsert_mode (Literal["patch", "replace"]): Only applicable with upsert. Pass 'patch' to only update fields with non-null values (default), or 'replace' to do full updates (unset fields become null or empty). + upsert_mode (Literal['patch', 'replace']): Only applicable with upsert. Pass 'patch' to only update fields with non-null values (default), or 'replace' to do full updates (unset fields become null or empty). Returns: AssetList: Created (and possibly updated) asset hierarchy @@ -778,12 +770,7 @@ def update( Args: item (Asset | AssetWrite | AssetUpdate | Sequence[Asset | AssetWrite | AssetUpdate]): Asset(s) to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (Asset or -Write). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (Asset or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. 
Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: Asset | AssetList: Updated asset(s) @@ -856,7 +843,7 @@ def upsert( Args: item (Asset | AssetWrite | Sequence[Asset | AssetWrite]): Asset or list of assets to upsert. - mode (Literal["patch", "replace"]): Whether to patch or replace in the case the assets are existing. If you set 'patch', the call will only update fields with non-null values (default). Setting 'replace' will unset any fields that are not specified. + mode (Literal['patch', 'replace']): Whether to patch or replace in the case the assets are existing. If you set 'patch', the call will only update fields with non-null values (default). Setting 'replace' will unset any fields that are not specified. Returns: Asset | AssetList: The upserted asset(s). @@ -1324,7 +1311,7 @@ def _insert( return _TaskResult(successful, failed, unknown) # Split assets based on their is-duplicated status: - non_dupes, dupe_assets = self._split_out_duplicated(cast(List[Dict], err.duplicated), assets) + non_dupes, dupe_assets = self._split_out_duplicated(cast(list[dict], err.duplicated), assets) # We should try to create the non-duplicated assets before running update (as these might be dependent): if non_dupes: result = self._insert(non_dupes, no_recursion=True, upsert=False, upsert_mode=upsert_mode) diff --git a/cognite/client/_api/data_modeling/containers.py b/cognite/client/_api/data_modeling/containers.py index 4ef6741d35..d564abba86 100644 --- a/cognite/client/_api/data_modeling/containers.py +++ b/cognite/client/_api/data_modeling/containers.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterator, Literal, Sequence, cast, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Literal, cast, overload from cognite.client._api_client import APIClient from cognite.client._constants import DATA_MODELING_DEFAULT_LIMIT_READ diff --git a/cognite/client/_api/data_modeling/data_models.py b/cognite/client/_api/data_modeling/data_models.py index 4770e1192a..0057553a9c 100644 --- a/cognite/client/_api/data_modeling/data_models.py +++ b/cognite/client/_api/data_modeling/data_models.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterator, Literal, Sequence, cast, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Literal, cast, overload from cognite.client._api_client import APIClient from cognite.client._constants import DATA_MODELING_DEFAULT_LIMIT_READ diff --git a/cognite/client/_api/data_modeling/instances.py b/cognite/client/_api/data_modeling/instances.py index 78435c0540..f848aef1d6 100644 --- a/cognite/client/_api/data_modeling/instances.py +++ b/cognite/client/_api/data_modeling/instances.py @@ -4,26 +4,19 @@ import logging import random import time -from collections.abc import Iterable +from collections.abc import Callable, Iterable, Iterator, Sequence from datetime import datetime, timezone from threading import Thread from typing import ( TYPE_CHECKING, Any, - Callable, Generic, - Iterator, - List, Literal, - Sequence, - Tuple, - Union, + TypeAlias, cast, overload, ) -from typing_extensions import TypeAlias - from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ from cognite.client.data_classes import filters @@ -89,7 +82,7 @@ logger 
= logging.getLogger(__name__) -Source: TypeAlias = Union[SourceSelector, View, ViewId, Tuple[str, str], Tuple[str, str, str]] +Source: TypeAlias = SourceSelector | View | ViewId | tuple[str, str] | tuple[str, str, str] class _NodeOrEdgeResourceAdapter(Generic[T_Node, T_Edge]): @@ -233,7 +226,7 @@ def __call__( Args: chunk_size (int | None): Number of data_models to return in each chunk. Defaults to yielding one instance at a time. - instance_type (Literal["node", "edge"]): Whether to query for nodes or edges. + instance_type (Literal['node', 'edge']): Whether to query for nodes or edges. limit (int | None): Maximum number of instances to return. Defaults to returning all items. include_typing (bool): Whether to return property type information as part of the result. sources (Source | Sequence[Source] | None): Views to retrieve properties from. @@ -259,7 +252,7 @@ def __call__( raise ValueError(f"Invalid instance type: {instance_type}") if not include_typing: return cast( - Union[Iterator[Edge], Iterator[EdgeList], Iterator[Node], Iterator[NodeList]], + Iterator[Edge] | Iterator[EdgeList] | Iterator[Node] | Iterator[NodeList], self._list_generator( list_cls=list_cls, resource_cls=resource_cls, @@ -700,7 +693,7 @@ def delete( """ identifiers = self._load_node_and_edge_ids(nodes, edges) deleted_instances = cast( - List, + list, self._delete_multiple( identifiers, wrap_ids=True, @@ -1057,7 +1050,7 @@ def search( Args: view (ViewId): View to search in. query (str | None): Query string that will be parsed and used for search. - instance_type (Literal["node", "edge"] | type[T_Node] | type[T_Edge]): Whether to search for nodes or edges. You can also pass a custom typed node (or edge class) inheriting from TypedNode (or TypedEdge). See apply, retrieve_nodes or retrieve_edges for an example. + instance_type (Literal['node', 'edge'] | type[T_Node] | type[T_Edge]): Whether to search for nodes or edges. You can also pass a custom typed node (or edge class) inheriting from TypedNode (or TypedEdge). See apply, retrieve_nodes or retrieve_edges for an example. properties (list[str] | None): Optional array of properties you want to search through. If you do not specify one or more properties, the service will search all text fields within the view. target_units (list[TargetUnit] | None): Properties to convert to another unit. The API can only convert to another unit if a unit has been defined as part of the type on the underlying container being queried. space (str | SequenceNotStr[str] | None): Restrict instance search to the given space (or list of spaces). @@ -1201,7 +1194,7 @@ def aggregate( view (ViewId): View to aggregate over. aggregates (MetricAggregation | dict | Sequence[MetricAggregation | dict]): The properties to aggregate over. group_by (str | SequenceNotStr[str] | None): The selection of fields to group the results by when doing aggregations. You can specify up to 5 items to group by. - instance_type (Literal["node", "edge"]): The type of instance. + instance_type (Literal['node', 'edge']): The type of instance. query (str | None): Optional query string. The API will parse the query string, and use it to match the text properties on elements to use for the aggregate(s). properties (str | SequenceNotStr[str] | None): Optional list of properties you want to apply the query to. If you do not list any properties, you search through text fields by default. target_units (list[TargetUnit] | None): Properties to convert to another unit. 
The API can only convert to another unit if a unit has been defined as part of the type on the underlying container being queried. @@ -1303,7 +1296,7 @@ def histogram( Args: view (ViewId): View to aggregate over. histograms (Histogram | Sequence[Histogram]): The properties to aggregate over. - instance_type (Literal["node", "edge"]): Whether to search for nodes or edges. + instance_type (Literal['node', 'edge']): Whether to search for nodes or edges. query (str | None): Query string that will be parsed and used for search. properties (SequenceNotStr[str] | None): Optional array of properties you want to search through. If you do not specify one or more properties, the service will search all text fields within the view. target_units (list[TargetUnit] | None): Properties to convert to another unit. The API can only convert to another unit if a unit has been defined as part of the type on the underlying container being queried. @@ -1518,7 +1511,7 @@ def list( """`List instances `_ Args: - instance_type (Literal["node", "edge"] | type[T_Node] | type[T_Edge]): Whether to query for nodes or edges. You can also pass a custom typed node (or edge class) inheriting from TypedNode (or TypedEdge). See apply, retrieve_nodes or retrieve_edges for an example. + instance_type (Literal['node', 'edge'] | type[T_Node] | type[T_Edge]): Whether to query for nodes or edges. You can also pass a custom typed node (or edge class) inheriting from TypedNode (or TypedEdge). See apply, retrieve_nodes or retrieve_edges for an example. include_typing (bool): Whether to return property type information as part of the result. sources (Source | Sequence[Source] | None): Views to retrieve properties from. space (str | SequenceNotStr[str] | None): Only return instances in the given space (or list of spaces). 
@@ -1585,7 +1578,7 @@ def list( raise ValueError(f"Invalid instance type: {instance_type}") return cast( - Union[NodeList[T_Node], EdgeList[T_Edge]], + NodeList[T_Node] | EdgeList[T_Edge], self._list( list_cls=list_cls, resource_cls=resource_cls, diff --git a/cognite/client/_api/data_modeling/spaces.py b/cognite/client/_api/data_modeling/spaces.py index 359f8bed86..2a46be6c10 100644 --- a/cognite/client/_api/data_modeling/spaces.py +++ b/cognite/client/_api/data_modeling/spaces.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterator, Sequence, cast, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, cast, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ diff --git a/cognite/client/_api/data_modeling/views.py b/cognite/client/_api/data_modeling/views.py index e39d93490a..76f537bc10 100644 --- a/cognite/client/_api/data_modeling/views.py +++ b/cognite/client/_api/data_modeling/views.py @@ -1,7 +1,8 @@ from __future__ import annotations from collections import defaultdict -from typing import TYPE_CHECKING, Iterator, Sequence, cast, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, cast, overload from cognite.client._api_client import APIClient from cognite.client._constants import DATA_MODELING_DEFAULT_LIMIT_READ diff --git a/cognite/client/_api/data_sets.py b/cognite/client/_api/data_sets.py index b054a44487..11fdde1268 100644 --- a/cognite/client/_api/data_sets.py +++ b/cognite/client/_api/data_sets.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Iterator, Literal, Sequence, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Any, Literal, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -235,12 +236,7 @@ def update( Args: item (DataSet | DataSetWrite | DataSetUpdate | Sequence[DataSet | DataSetWrite | DataSetUpdate]): Data set(s) to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (DataSet or -Write). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (DataSet or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. 
Returns: DataSet | DataSetList: Updated data set(s) diff --git a/cognite/client/_api/datapoint_tasks.py b/cognite/client/_api/datapoint_tasks.py index 09703ffe5d..e5288bb63b 100644 --- a/cognite/client/_api/datapoint_tasks.py +++ b/cognite/client/_api/datapoint_tasks.py @@ -6,27 +6,21 @@ import warnings from abc import ABC, abstractmethod from collections import defaultdict +from collections.abc import Callable, Iterator, Sequence from dataclasses import dataclass from functools import cached_property -from itertools import chain +from itertools import chain, pairwise from typing import ( TYPE_CHECKING, Any, - Callable, - DefaultDict, - Iterator, - List, Literal, NoReturn, - Sequence, - Tuple, + TypeAlias, TypeVar, - Union, cast, ) from google.protobuf.internal.containers import RepeatedCompositeFieldContainer -from typing_extensions import TypeAlias from cognite.client._constants import NUMPY_IS_AVAILABLE from cognite.client._proto.data_point_list_response_pb2 import DataPointListItem @@ -75,16 +69,16 @@ NumericDatapoints = RepeatedCompositeFieldContainer[NumericDatapoint] StringDatapoints = RepeatedCompositeFieldContainer[StringDatapoint] -DatapointAny = Union[AggregateDatapoint, NumericDatapoint, StringDatapoint] -DatapointsAny = Union[AggregateDatapoints, NumericDatapoints, StringDatapoints] +DatapointAny = AggregateDatapoint | NumericDatapoint | StringDatapoint +DatapointsAny = AggregateDatapoints | NumericDatapoints | StringDatapoints -DatapointRaw = Union[NumericDatapoint, StringDatapoint] -DatapointsRaw = Union[NumericDatapoints, StringDatapoints] +DatapointRaw = NumericDatapoint | StringDatapoint +DatapointsRaw = NumericDatapoints | StringDatapoints -RawDatapointValue = Union[float, str] -DatapointsId = Union[int, DatapointsQuery, Sequence[Union[int, DatapointsQuery]]] -DatapointsExternalId = Union[str, DatapointsQuery, SequenceNotStr[Union[str, DatapointsQuery]]] -DatapointsInstanceId = Union[NodeId, DatapointsQuery, Sequence[Union[NodeId, DatapointsQuery]]] +RawDatapointValue = float | str +DatapointsId = int | DatapointsQuery | Sequence[int | DatapointsQuery] +DatapointsExternalId = str | DatapointsQuery | SequenceNotStr[str | DatapointsQuery] +DatapointsInstanceId = NodeId | DatapointsQuery | Sequence[NodeId | DatapointsQuery] @dataclass @@ -475,7 +469,7 @@ def get_ts_info_from_proto(res: DataPointListItem) -> dict[str, int | str | bool } -_DataContainer: TypeAlias = DefaultDict[Tuple[float, ...], List] +_DataContainer: TypeAlias = defaultdict[tuple[float, ...], list] def datapoints_in_order(container: _DataContainer) -> Iterator[list]: @@ -1021,7 +1015,7 @@ def _create_uniformly_split_subtasks(self, n_workers_per_queries: int) -> list[B boundaries = split_time_range(start, end, n_periods, self.offset_next) return [ SplittingFetchSubtask(start=start, end=end, subtask_idx=(i,), parent=self) - for i, (start, end) in enumerate(zip(boundaries[:-1], boundaries[1:]), 1) + for i, (start, end) in enumerate(pairwise(boundaries), 1) ] diff --git a/cognite/client/_api/datapoints.py b/cognite/client/_api/datapoints.py index d3d64f5c4a..03e825ffd6 100644 --- a/cognite/client/_api/datapoints.py +++ b/cognite/client/_api/datapoints.py @@ -9,26 +9,20 @@ import warnings from abc import ABC, abstractmethod from collections import defaultdict +from collections.abc import Callable, Iterable, Iterator, MutableSequence, Sequence from itertools import chain from operator import itemgetter from typing import ( TYPE_CHECKING, Any, - Callable, - Iterable, - Iterator, - List, Literal, - MutableSequence, 
NamedTuple, - Sequence, - Tuple, + TypeGuard, TypeVar, - Union, cast, ) -from typing_extensions import Self, TypeGuard +from typing_extensions import Self from cognite.client._api.datapoint_tasks import ( BaseDpsFetchSubtask, @@ -83,8 +77,8 @@ as_completed = import_as_completed() -_TSQueryList = List[DatapointsQuery] -PoolSubtaskType = Tuple[float, int, BaseDpsFetchSubtask] +_TSQueryList = list[DatapointsQuery] +PoolSubtaskType = tuple[float, int, BaseDpsFetchSubtask] _T = TypeVar("_T") _TResLst = TypeVar("_TResLst", DatapointsList, DatapointsArrayList) @@ -939,7 +933,7 @@ def retrieve_dataframe( uniform_index (bool): If only querying aggregates AND a single granularity is used AND no limit is used, specifying `uniform_index=True` will return a dataframe with an equidistant datetime index from the earliest `start` to the latest `end` (missing values will be NaNs). If these requirements are not met, a ValueError is raised. Default: False include_aggregate_name (bool): Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True include_granularity_name (bool): Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False - column_names (Literal["id", "external_id", "instance_id"]): Use either instance IDs, external IDs or IDs as column names. Time series missing instance ID will use external ID if it exists then ID as backup. Default: "instance_id" + column_names (Literal['id', 'external_id', 'instance_id']): Use either instance IDs, external IDs or IDs as column names. Time series missing instance ID will use external ID if it exists then ID as backup. Default: "instance_id" Returns: pd.DataFrame: A pandas DataFrame containing the requested time series. The ordering of columns is ids first, then external_ids. For time series with multiple aggregates, they will be sorted in alphabetical order ("average" before "max"). @@ -1076,11 +1070,8 @@ def retrieve_dataframe_in_tz( external_id (str | SequenceNotStr[str] | None): External ID or list of External IDs. start (datetime.datetime): Inclusive start, must be timezone aware. end (datetime.datetime): Exclusive end, must be timezone aware and have the same timezone as start. - aggregates (Aggregate | str | list[Aggregate | str] | None): Single aggregate or list of aggregates to retrieve. Available options: ``average``, ``continuous_variance``, ``count``, ``count_bad``, ``count_good``, - ``count_uncertain``, ``discrete_variance``, ``duration_bad``, ``duration_good``, ``duration_uncertain``, ``interpolation``, ``max``, ``min``, ``step_interpolation``, ``sum`` and ``total_variation``. - Default: None (raw datapoints returned) - granularity (str | None): The granularity to fetch aggregates at. Can be given as an abbreviation or spelled out for clarity: ``s/second(s)``, ``m/minute(s)``, ``h/hour(s)``, ``d/day(s)``, ``w/week(s)``, ``mo/month(s)``, - ``q/quarter(s)``, or ``y/year(s)``. Examples: ``30s``, ``5m``, ``1day``, ``2weeks``. Default: None. + aggregates (Aggregate | str | list[Aggregate | str] | None): Single aggregate or list of aggregates to retrieve. Available options: ``average``, ``continuous_variance``, ``count``, ``count_bad``, ``count_good``, ``count_uncertain``, ``discrete_variance``, ``duration_bad``, ``duration_good``, ``duration_uncertain``, ``interpolation``, ``max``, ``min``, ``step_interpolation``, ``sum`` and ``total_variation``. 
Default: None (raw datapoints returned) + granularity (str | None): The granularity to fetch aggregates at. Can be given as an abbreviation or spelled out for clarity: ``s/second(s)``, ``m/minute(s)``, ``h/hour(s)``, ``d/day(s)``, ``w/week(s)``, ``mo/month(s)``, ``q/quarter(s)``, or ``y/year(s)``. Examples: ``30s``, ``5m``, ``1day``, ``2weeks``. Default: None. target_unit (str | None): The unit_external_id of the datapoints returned. If the time series does not have a unit_external_id that can be converted to the target_unit, an error will be returned. Cannot be used with target_unit_system. target_unit_system (str | None): The unit system of the datapoints returned. Cannot be used with target_unit. ignore_unknown_ids (bool): Whether to ignore missing time series rather than raising an exception. Default: False @@ -1090,7 +1081,7 @@ def retrieve_dataframe_in_tz( uniform_index (bool): If querying aggregates with a non-calendar granularity, specifying ``uniform_index=True`` will return a dataframe with an index with constant spacing between timestamps decided by granularity all the way from `start` to `end` (missing values will be NaNs). Default: False include_aggregate_name (bool): Include 'aggregate' in the column name, e.g. `my-ts|average`. Ignored for raw time series. Default: True include_granularity_name (bool): Include 'granularity' in the column name, e.g. `my-ts|12h`. Added after 'aggregate' when present. Ignored for raw time series. Default: False - column_names (Literal["id", "external_id"]): Use either ids or external ids as column names. Time series missing external id will use id as backup. Default: "external_id" + column_names (Literal['id', 'external_id']): Use either ids or external ids as column names. Time series missing external id will use id as backup. Default: "external_id" Returns: pd.DataFrame: A pandas DataFrame containing the requested time series with a DatetimeIndex localized in the given timezone. @@ -1758,8 +1749,8 @@ def __init__( self.ignore_unknown_ids = ignore_unknown_ids self.dps_client = dps_client - parsed_ids = cast(Union[None, int, Sequence[int]], self._parse_user_input(id, "id")) - parsed_xids = cast(Union[None, str, SequenceNotStr[str]], self._parse_user_input(external_id, "external_id")) + parsed_ids = cast(None | int | Sequence[int], self._parse_user_input(id, "id")) + parsed_xids = cast(None | str | SequenceNotStr[str], self._parse_user_input(external_id, "external_id")) self._is_singleton = IdentifierSequence.load(parsed_ids, parsed_xids).is_singleton() self._all_identifiers = self._prepare_requests(parsed_ids, parsed_xids) diff --git a/cognite/client/_api/datapoints_subscriptions.py b/cognite/client/_api/datapoints_subscriptions.py index d676d3af5e..672ff87c3c 100644 --- a/cognite/client/_api/datapoints_subscriptions.py +++ b/cognite/client/_api/datapoints_subscriptions.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterator, Literal, cast, overload +from collections.abc import Iterator +from typing import TYPE_CHECKING, Literal, cast, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -204,12 +205,7 @@ def update( Args: update (DataPointSubscriptionUpdate | DataPointSubscriptionWrite): The subscription update. - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (DataPointSubscriptionWrite). 
If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (DataPointSubscriptionWrite). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. Returns: DatapointSubscription: Updated subscription. diff --git a/cognite/client/_api/diagrams.py b/cognite/client/_api/diagrams.py index 01da62154e..d01441741f 100644 --- a/cognite/client/_api/diagrams.py +++ b/cognite/client/_api/diagrams.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Sequence from math import ceil -from typing import TYPE_CHECKING, Any, Literal, Sequence, TypeVar, cast, overload +from typing import TYPE_CHECKING, Any, Literal, TypeVar, cast, overload from requests import Response diff --git a/cognite/client/_api/documents.py b/cognite/client/_api/documents.py index c0c2fa3ddc..8a8d4a58b6 100644 --- a/cognite/client/_api/documents.py +++ b/cognite/client/_api/documents.py @@ -210,7 +210,7 @@ def __call__( Args: chunk_size (int | None): Number of documents to return in each chunk. Defaults to yielding one document at a time. filter (Filter | dict[str, Any] | None): The filter to narrow down the documents to return. - sort (DocumentSort | SortableProperty | tuple[SortableProperty, Literal["asc", "desc"]] | None): The property to sort by. The default order is ascending. + sort (DocumentSort | SortableProperty | tuple[SortableProperty, Literal['asc', 'desc']] | None): The property to sort by. The default order is ascending. limit (int | None): Maximum number of documents to return. Defaults to returning all items. partitions (int | None): Retrieve documents in parallel using this number of workers. Also requires `limit=None` to be passed. To prevent unexpected problems and maximize read throughput, API documentation recommends using at most 10 partitions. When using more than 10 partitions, actual throughput decreases. In future releases of the APIs, CDF may reject requests with more than 10 partitions. @@ -573,7 +573,7 @@ def search( query (str): The free text search query. highlight (bool): Whether or not matches in search results should be highlighted. filter (Filter | dict[str, Any] | None): The filter to narrow down the documents to search. - sort (DocumentSort | SortableProperty | tuple[SortableProperty, Literal["asc", "desc"]] | None): The property to sort by. The default order is ascending. + sort (DocumentSort | SortableProperty | tuple[SortableProperty, Literal['asc', 'desc']] | None): The property to sort by. The default order is ascending. limit (int): Maximum number of items to return. When using highlights, the maximum value is reduced to 20. Defaults to 25. Returns: @@ -649,7 +649,7 @@ def list( Args: filter (Filter | dict[str, Any] | None): The filter to narrow down the documents to return. 
- sort (DocumentSort | SortableProperty | tuple[SortableProperty, Literal["asc", "desc"]] | None): The property to sort by. The default order is ascending. + sort (DocumentSort | SortableProperty | tuple[SortableProperty, Literal['asc', 'desc']] | None): The property to sort by. The default order is ascending. limit (int | None): Maximum number of documents to return. Defaults to 25. Set to None or -1 to return all documents. Returns: diff --git a/cognite/client/_api/entity_matching.py b/cognite/client/_api/entity_matching.py index 5bed0e5bf5..3129e98c44 100644 --- a/cognite/client/_api/entity_matching.py +++ b/cognite/client/_api/entity_matching.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Any, Literal, Sequence, TypeVar +from collections.abc import Sequence +from typing import Any, Literal, TypeVar from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -95,12 +96,7 @@ def update( Args: item (EntityMatchingModel | EntityMatchingModelUpdate | Sequence[EntityMatchingModel | EntityMatchingModelUpdate]): Model(s) to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (EntityMatchingModel). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (EntityMatchingModel). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: EntityMatchingModelList | EntityMatchingModel: No description. 
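Why the SortSpec rewrites (assets.py above, events.py and sequences.py below) can drop typing.Union and the typing_extensions.TypeAlias import: a module-level type alias is evaluated eagerly at import time, so `from __future__ import annotations` does not defer it, and the `X | Y` spelling between subscripted types needs the 3.10 runtime (types.UnionType). A minimal sketch of the pattern, with illustrative names rather than SDK code:

    from typing import Literal, TypeAlias  # TypeAlias importable from plain typing since 3.10

    # Evaluated at import time: raises TypeError on 3.8/3.9, builds a
    # types.UnionType on 3.10+. Lazy annotations cannot defer alias bodies.
    SortSpec: TypeAlias = str | tuple[str, Literal["asc", "desc"]]

    def sort_field(spec: SortSpec) -> str:
        # Accepts either alias member.
        return spec if isinstance(spec, str) else spec[0]
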
diff --git a/cognite/client/_api/events.py b/cognite/client/_api/events.py index 1ab24d3193..5ffcd53dc3 100644 --- a/cognite/client/_api/events.py +++ b/cognite/client/_api/events.py @@ -1,9 +1,8 @@ from __future__ import annotations import warnings -from typing import Any, Iterator, Literal, Sequence, Tuple, Union, overload - -from typing_extensions import TypeAlias +from collections.abc import Iterator, Sequence +from typing import Any, Literal, TypeAlias, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -24,13 +23,13 @@ from cognite.client.utils._validation import prepare_filter_sort, process_asset_subtree_ids, process_data_set_ids from cognite.client.utils.useful_types import SequenceNotStr -SortSpec: TypeAlias = Union[ - EventSort, - str, - SortableEventProperty, - Tuple[str, Literal["asc", "desc"]], - Tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]], -] +SortSpec: TypeAlias = ( + EventSort + | str + | SortableEventProperty + | tuple[str, Literal["asc", "desc"]] + | tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]] +) _FILTERS_SUPPORTED: frozenset[type[Filter]] = _BASIC_FILTERS | {filters.Search} @@ -577,12 +576,7 @@ def update( Args: item (Event | EventWrite | EventUpdate | Sequence[Event | EventWrite | EventUpdate]): Event(s) to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (Event or -Write). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (Event or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: Event | EventList: Updated event(s) @@ -653,7 +647,7 @@ def upsert( Args: item (Event | EventWrite | Sequence[Event | EventWrite]): Event or list of events to upsert. - mode (Literal["patch", "replace"]): Whether to patch or replace in the case the events are existing. If you set 'patch', the call will only update fields with non-null values (default). Setting 'replace' will unset any fields that are not specified. + mode (Literal['patch', 'replace']): Whether to patch or replace in the case the events are existing. If you set 'patch', the call will only update fields with non-null values (default). Setting 'replace' will unset any fields that are not specified. Returns: Event | EventList: The upserted event(s). 
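The `zip(boundaries[:-1], boundaries[1:])` to `pairwise(boundaries)` change in datapoint_tasks.py above is likewise gated on the new floor: itertools.pairwise was added in Python 3.10. A quick equivalence sketch with illustrative split points:

    from itertools import pairwise  # new in Python 3.10

    boundaries = [0, 10, 20, 30]  # stand-in for split_time_range() output

    # The old spelling materializes two shifted slices; pairwise() is one lazy pass.
    assert list(zip(boundaries[:-1], boundaries[1:])) == list(pairwise(boundaries))
    assert list(pairwise(boundaries)) == [(0, 10), (10, 20), (20, 30)]
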
diff --git a/cognite/client/_api/extractionpipelines.py b/cognite/client/_api/extractionpipelines.py index 65650f2c1b..e9dcdbd18b 100644 --- a/cognite/client/_api/extractionpipelines.py +++ b/cognite/client/_api/extractionpipelines.py @@ -1,9 +1,7 @@ from __future__ import annotations -from collections.abc import Iterator -from typing import TYPE_CHECKING, Any, Literal, Sequence, Union, cast, overload - -from typing_extensions import TypeAlias +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, cast, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -251,12 +249,7 @@ def update( Args: item (ExtractionPipeline | ExtractionPipelineWrite | ExtractionPipelineUpdate | Sequence[ExtractionPipeline | ExtractionPipelineWrite | ExtractionPipelineUpdate]): Extraction pipeline(s) to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (ExtractionPipeline or -Write). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (ExtractionPipeline or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: ExtractionPipeline | ExtractionPipelineList: Updated extraction pipeline(s) @@ -337,7 +330,7 @@ def list( if statuses is not None or message_substring is not None or created_time is not None: filter = ExtractionPipelineRunFilter( external_id=external_id, - statuses=cast(Union[SequenceNotStr[str], None], [statuses] if isinstance(statuses, str) else statuses), + statuses=cast(SequenceNotStr[str] | None, [statuses] if isinstance(statuses, str) else statuses), message=StringFilter(substring=message_substring), created_time=created_time, ).dump(camel_case=True) diff --git a/cognite/client/_api/files.py b/cognite/client/_api/files.py index 745d9ffbd1..d95860c99a 100644 --- a/cognite/client/_api/files.py +++ b/cognite/client/_api/files.py @@ -4,9 +4,10 @@ import os import warnings from collections import defaultdict +from collections.abc import Iterator, Sequence from io import BufferedReader from pathlib import Path -from typing import Any, BinaryIO, Iterator, Literal, Sequence, TextIO, cast, overload +from typing import Any, BinaryIO, Literal, TextIO, cast, overload from urllib.parse import urljoin, urlparse from cognite.client._api_client import APIClient @@ -386,7 +387,7 @@ def update( Args: item (FileMetadata | FileMetadataWrite | FileMetadataUpdate | Sequence[FileMetadata | FileMetadataWrite | FileMetadataUpdate]): file(s) to update. - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update object is given (FilesMetadata or -Write). 
If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (FilesMetadata or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: FileMetadata | FileMetadataList: The updated files. diff --git a/cognite/client/_api/functions.py b/cognite/client/_api/functions.py index 70ef65d464..d1d4a4b10b 100644 --- a/cognite/client/_api/functions.py +++ b/cognite/client/_api/functions.py @@ -7,12 +7,12 @@ import sys import textwrap import time -from collections.abc import Iterator +from collections.abc import Callable, Iterator, Sequence from inspect import getdoc, getsource, signature from multiprocessing import Process, Queue from pathlib import Path from tempfile import TemporaryDirectory -from typing import TYPE_CHECKING, Any, Callable, Literal, NoReturn, Sequence, cast, overload +from typing import TYPE_CHECKING, Any, Literal, NoReturn, cast, overload from zipfile import ZipFile from cognite.client._api_client import APIClient @@ -158,7 +158,7 @@ def __call__( file_id (int | None): The file ID of the zip-file used to create the function. status (FunctionStatus | None): Status of the function. Possible values: ["Queued", "Deploying", "Ready", "Failed"]. external_id_prefix (str | None): External ID prefix to filter on. - created_time (dict[Literal["min", "max"], int] | TimestampRange | None): Range between two timestamps. Possible keys are `min` and `max`, with values given as time stamps in ms. + created_time (dict[Literal['min', 'max'], int] | TimestampRange | None): Range between two timestamps. Possible keys are `min` and `max`, with values given as time stamps in ms. metadata (dict[str, str] | None): No description. limit (int | None): Maximum number of functions to return. Defaults to yielding all functions. @@ -415,7 +415,7 @@ def list( file_id (int | None): The file ID of the zip-file used to create the function. status (FunctionStatus | None): Status of the function. Possible values: ["Queued", "Deploying", "Ready", "Failed"]. external_id_prefix (str | None): External ID prefix to filter on. - created_time (dict[Literal["min", "max"], int] | TimestampRange | None): Range between two timestamps. Possible keys are `min` and `max`, with values given as time stamps in ms. + created_time (dict[Literal['min', 'max'], int] | TimestampRange | None): Range between two timestamps. Possible keys are `min` and `max`, with values given as time stamps in ms. metadata (dict[str, str] | None): Custom, application-specific metadata. String key -> String value. Limits: Maximum length of key is 32, value 512 characters, up to 16 key-value pairs. Maximum size of entire metadata is 4096 bytes. limit (int | None): Maximum number of functions to return. Pass in -1, float('inf') or None to list all. 
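The rewritten cast() call sites above (cast(list[dict], ...) in assets.py, cast(SequenceNotStr[str] | None, ...) in extractionpipelines.py) depend on the same runtime support: cast()'s first argument is an ordinary expression, evaluated even under `from __future__ import annotations`, so builtin and collections.abc generics need 3.9 and the `|` operator needs 3.10. A minimal sketch, assuming nothing from the SDK:

    from collections.abc import Sequence  # subscriptable at runtime since 3.9
    from typing import cast

    def first_id(ids: object) -> int | None:
        # The union is built when cast() runs, so this line needs the 3.10
        # runtime regardless of lazy annotations.
        parsed = cast(Sequence[int] | None, ids)
        return parsed[0] if parsed else None

    print(first_id([42, 7]))  # -> 42
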
diff --git a/cognite/client/_api/geospatial.py b/cognite/client/_api/geospatial.py index f7e153ce58..5e18a96e54 100644 --- a/cognite/client/_api/geospatial.py +++ b/cognite/client/_api/geospatial.py @@ -2,7 +2,8 @@ import numbers import urllib.parse -from typing import Any, Iterator, Sequence, cast, overload +from collections.abc import Iterator, Sequence +from typing import Any, cast, overload from requests.exceptions import ChunkedEncodingError diff --git a/cognite/client/_api/hosted_extractors/destinations.py b/cognite/client/_api/hosted_extractors/destinations.py index 579531687b..d6c9536fac 100644 --- a/cognite/client/_api/hosted_extractors/destinations.py +++ b/cognite/client/_api/hosted_extractors/destinations.py @@ -1,7 +1,7 @@ from __future__ import annotations -from collections.abc import Iterator -from typing import TYPE_CHECKING, Any, Literal, Sequence, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Any, Literal, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -215,12 +215,7 @@ def update( Args: items (DestinationWrite | DestinationUpdate | Sequence[DestinationWrite | DestinationUpdate]): Destination(s) to update. - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (DestinationWrite). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (DestinationWrite). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: Destination | DestinationList: Updated destination(s) diff --git a/cognite/client/_api/hosted_extractors/jobs.py b/cognite/client/_api/hosted_extractors/jobs.py index 1cb673a4de..890f0e2ab7 100644 --- a/cognite/client/_api/hosted_extractors/jobs.py +++ b/cognite/client/_api/hosted_extractors/jobs.py @@ -1,7 +1,7 @@ from __future__ import annotations -from collections.abc import Iterator -from typing import TYPE_CHECKING, Any, Literal, Sequence, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Any, Literal, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -216,12 +216,7 @@ def update( Args: items (JobWrite | JobUpdate | Sequence[JobWrite | JobUpdate]): Job(s) to update. - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (JobWrite). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. 
Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (JobWrite). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: Job | JobList: Updated job(s) diff --git a/cognite/client/_api/hosted_extractors/mappings.py b/cognite/client/_api/hosted_extractors/mappings.py index a9a799a159..a2f16d878c 100644 --- a/cognite/client/_api/hosted_extractors/mappings.py +++ b/cognite/client/_api/hosted_extractors/mappings.py @@ -1,7 +1,7 @@ from __future__ import annotations -from collections.abc import Iterator -from typing import TYPE_CHECKING, Any, Sequence, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Any, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ diff --git a/cognite/client/_api/hosted_extractors/sources.py b/cognite/client/_api/hosted_extractors/sources.py index 6e57deb351..6304f5e9b9 100644 --- a/cognite/client/_api/hosted_extractors/sources.py +++ b/cognite/client/_api/hosted_extractors/sources.py @@ -1,7 +1,7 @@ from __future__ import annotations -from collections.abc import Iterator -from typing import TYPE_CHECKING, Any, Literal, Sequence, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Any, Literal, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -208,12 +208,7 @@ def update( Args: items (SourceWrite | SourceUpdate | Sequence[SourceWrite | SourceUpdate]): Source(s) to update. - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (SourceWrite). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (SourceWrite). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. 
Returns: Source | SourceList: Updated source(s) diff --git a/cognite/client/_api/iam.py b/cognite/client/_api/iam.py index 9eee4608fb..101481118f 100644 --- a/cognite/client/_api/iam.py +++ b/cognite/client/_api/iam.py @@ -1,11 +1,10 @@ from __future__ import annotations import warnings +from collections.abc import Iterable, Sequence from itertools import groupby from operator import itemgetter -from typing import TYPE_CHECKING, Any, Dict, Iterable, Literal, Sequence, Union, overload - -from typing_extensions import TypeAlias +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, overload from cognite.client._api.user_profiles import UserProfilesAPI from cognite.client._api_client import APIClient @@ -47,16 +46,16 @@ from cognite.client import CogniteClient -ComparableCapability: TypeAlias = Union[ - Capability, - Sequence[Capability], - Dict[str, Any], - Sequence[Dict[str, Any]], - Group, - GroupList, - ProjectCapability, - ProjectCapabilityList, -] +ComparableCapability: TypeAlias = ( + Capability + | Sequence[Capability] + | dict[str, Any] + | Sequence[dict[str, Any]] + | Group + | GroupList + | ProjectCapability + | ProjectCapabilityList +) def _convert_capability_to_tuples( diff --git a/cognite/client/_api/labels.py b/cognite/client/_api/labels.py index edfed83d79..009825cd54 100644 --- a/cognite/client/_api/labels.py +++ b/cognite/client/_api/labels.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Iterator, Literal, Sequence, overload +from collections.abc import Iterator, Sequence +from typing import Literal, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ diff --git a/cognite/client/_api/postgres_gateway/users.py b/cognite/client/_api/postgres_gateway/users.py index 052fa991be..03bf018ef5 100644 --- a/cognite/client/_api/postgres_gateway/users.py +++ b/cognite/client/_api/postgres_gateway/users.py @@ -1,7 +1,7 @@ from __future__ import annotations -from collections.abc import Iterator -from typing import TYPE_CHECKING, Sequence, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ diff --git a/cognite/client/_api/raw.py b/cognite/client/_api/raw.py index 3bad1f2cf3..3413a55450 100644 --- a/cognite/client/_api/raw.py +++ b/cognite/client/_api/raw.py @@ -5,7 +5,8 @@ import threading import time from collections import defaultdict, deque -from typing import TYPE_CHECKING, Any, Iterator, Sequence, cast, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Any, cast, overload from cognite.client._api_client import APIClient from cognite.client._constants import _RUNNING_IN_BROWSER, DEFAULT_LIMIT_READ diff --git a/cognite/client/_api/relationships.py b/cognite/client/_api/relationships.py index aaec16622d..075a2560bb 100644 --- a/cognite/client/_api/relationships.py +++ b/cognite/client/_api/relationships.py @@ -2,8 +2,9 @@ import itertools import warnings +from collections.abc import Iterator, Sequence from functools import partial -from typing import TYPE_CHECKING, Iterator, Literal, Sequence, overload +from typing import TYPE_CHECKING, Literal, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -428,12 +429,7 @@ def update( Args: item (Relationship | RelationshipWrite | RelationshipUpdate | Sequence[Relationship | RelationshipWrite | 
RelationshipUpdate]): Relationship(s) to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (Relationship or -Write). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (Relationship or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: Relationship | RelationshipList: Updated relationship(s) @@ -501,7 +497,7 @@ def upsert( Args: item (Relationship | RelationshipWrite | Sequence[Relationship | RelationshipWrite]): Relationship or list of relationships to upsert. - mode (Literal["patch", "replace"]): Whether to patch or replace in the case the relationships are existing. If you set 'patch', the call will only update fields with non-null values (default). Setting 'replace' will unset any fields that are not specified. + mode (Literal['patch', 'replace']): Whether to patch or replace in the case the relationships are existing. If you set 'patch', the call will only update fields with non-null values (default). Setting 'replace' will unset any fields that are not specified. Returns: Relationship | RelationshipList: The upserted relationship(s). diff --git a/cognite/client/_api/sequences.py b/cognite/client/_api/sequences.py index e114b8d492..6787971427 100644 --- a/cognite/client/_api/sequences.py +++ b/cognite/client/_api/sequences.py @@ -3,9 +3,8 @@ import math import typing import warnings -from typing import TYPE_CHECKING, Any, Iterator, Literal, Tuple, Union, overload - -from typing_extensions import TypeAlias +from collections.abc import Iterator +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -45,13 +44,13 @@ from cognite.client import CogniteClient from cognite.client.config import ClientConfig -SortSpec: TypeAlias = Union[ - SequenceSort, - str, - SortableSequenceProperty, - Tuple[str, Literal["asc", "desc"]], - Tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]], -] +SortSpec: TypeAlias = ( + SequenceSort + | str + | SortableSequenceProperty + | tuple[str, Literal["asc", "desc"]] + | tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]] +) _FILTERS_SUPPORTED: frozenset[type[Filter]] = _BASIC_FILTERS | {filters.Search} @@ -596,12 +595,7 @@ def update( Args: item (Sequence | SequenceWrite | SequenceUpdate | typing.Sequence[Sequence | SequenceWrite | SequenceUpdate]): Sequences to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (Sequence or -Write). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). 
Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (Sequence or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: Sequence | SequenceList: Updated sequences. @@ -705,7 +699,7 @@ def upsert( Args: item (Sequence | SequenceWrite | typing.Sequence[Sequence | SequenceWrite]): Sequence or list of sequences to upsert. - mode (Literal["patch", "replace"]): Whether to patch or replace in the case the sequences are existing. If you set 'patch', the call will only update fields with non-null values (default). Setting 'replace' will unset any fields that are not specified. + mode (Literal['patch', 'replace']): Whether to patch or replace in the case the sequences are existing. If you set 'patch', the call will only update fields with non-null values (default). Setting 'replace' will unset any fields that are not specified. Returns: Sequence | SequenceList: The upserted sequence(s). diff --git a/cognite/client/_api/synthetic_time_series.py b/cognite/client/_api/synthetic_time_series.py index 0aac9c019a..7fadfbece3 100644 --- a/cognite/client/_api/synthetic_time_series.py +++ b/cognite/client/_api/synthetic_time_series.py @@ -1,8 +1,9 @@ from __future__ import annotations import re +from collections.abc import Sequence from datetime import datetime -from typing import TYPE_CHECKING, Any, Sequence, Union, cast +from typing import TYPE_CHECKING, Any, Union, cast from cognite.client._api_client import APIClient from cognite.client.data_classes import Datapoints, DatapointsList, TimeSeries, TimeSeriesWrite diff --git a/cognite/client/_api/templates.py b/cognite/client/_api/templates.py index 66e029b277..cd45e6d4a5 100644 --- a/cognite/client/_api/templates.py +++ b/cognite/client/_api/templates.py @@ -1,7 +1,8 @@ from __future__ import annotations import warnings -from typing import TYPE_CHECKING, Any, List, Sequence, cast +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any, cast from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -145,9 +146,9 @@ def upsert(self, template_groups: TemplateGroup | Sequence[TemplateGroup]) -> Te path = self._RESOURCE_PATH + "/upsert" is_single = not isinstance(template_groups, list) if is_single: - template_groups_processed: list[TemplateGroup] = cast(List[TemplateGroup], [template_groups]) + template_groups_processed: list[TemplateGroup] = cast(list[TemplateGroup], [template_groups]) else: - template_groups_processed = cast(List[TemplateGroup], template_groups) + template_groups_processed = cast(list[TemplateGroup], template_groups) updated = self._post( path, {"items": [item.dump(camel_case=True) for item in template_groups_processed]} ).json()["items"] diff --git a/cognite/client/_api/three_d.py b/cognite/client/_api/three_d.py index b437ccf8f9..6e98103e12 100644 --- 
a/cognite/client/_api/three_d.py +++ b/cognite/client/_api/three_d.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterator, Literal, Sequence, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Literal, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -215,12 +216,7 @@ def update( Args: item (ThreeDModel | ThreeDModelUpdate | Sequence[ThreeDModel | ThreeDModelUpdate]): ThreeDModel(s) to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (ThreeDModel or -Write). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (ThreeDModel or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: ThreeDModel | ThreeDModelList: Updated ThreeDModel(s) @@ -421,12 +417,7 @@ def update( Args: model_id (int): Update the revision under the model with this id. item (ThreeDModelRevision | ThreeDModelRevisionUpdate | Sequence[ThreeDModelRevision | ThreeDModelRevisionUpdate]): ThreeDModelRevision(s) to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (ThreeDModelRevision or -Write). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (ThreeDModelRevision or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. 
Returns: ThreeDModelRevision | ThreeDModelRevisionList: Updated ThreeDModelRevision(s) diff --git a/cognite/client/_api/time_series.py b/cognite/client/_api/time_series.py index 8df0fa8c44..fc4c3024b7 100644 --- a/cognite/client/_api/time_series.py +++ b/cognite/client/_api/time_series.py @@ -1,9 +1,8 @@ from __future__ import annotations import warnings -from typing import TYPE_CHECKING, Any, Iterator, Literal, Sequence, Tuple, Union, overload - -from typing_extensions import TypeAlias +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, overload from cognite.client._api.datapoints import DatapointsAPI from cognite.client._api.datapoints_subscriptions import DatapointsSubscriptionAPI @@ -33,13 +32,13 @@ from cognite.client import CogniteClient from cognite.client.config import ClientConfig -SortSpec: TypeAlias = Union[ - TimeSeriesSort, - str, - SortableTimeSeriesProperty, - Tuple[str, Literal["asc", "desc"]], - Tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]], -] +SortSpec: TypeAlias = ( + TimeSeriesSort + | str + | SortableTimeSeriesProperty + | tuple[str, Literal["asc", "desc"]] + | tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]] +) _FILTERS_SUPPORTED: frozenset[type[Filter]] = _BASIC_FILTERS | {filters.Search} @@ -604,12 +603,7 @@ def update( Args: item (TimeSeries | TimeSeriesWrite | TimeSeriesUpdate | Sequence[TimeSeries | TimeSeriesWrite | TimeSeriesUpdate]): Time series to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update - object is given (TimeSeries or -Write). If you use 'replace_ignore_null', only the fields - you have set will be used to replace existing (default). Using 'replace' will additionally - clear all the fields that are not specified by you. Last option, 'patch', will update only - the fields you have set and for container-like fields such as metadata or labels, add the - values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (TimeSeries or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. Returns: TimeSeries | TimeSeriesList: Updated time series. @@ -675,7 +669,7 @@ def upsert( Args: item (TimeSeries | TimeSeriesWrite | Sequence[TimeSeries | TimeSeriesWrite]): TimeSeries or list of TimeSeries to upsert. - mode (Literal["patch", "replace"]): Whether to patch or replace in the case the time series are existing. If you set 'patch', the call will only update fields with non-null values (default). Setting 'replace' will unset any fields that are not specified. + mode (Literal['patch', 'replace']): Whether to patch or replace in the case the time series are existing. If you set 'patch', the call will only update fields with non-null values (default). Setting 'replace' will unset any fields that are not specified. Returns: TimeSeries | TimeSeriesList: The upserted time series(s). 
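The SortSpec rewrite above is the template for every type alias touched by this PR: on Python 3.10, TypeAlias is importable from typing itself, and the alias value can be a PEP 604 `|` union over builtin generics. Because an alias value is an ordinary runtime expression (unlike annotations, it is not deferred by `from __future__ import annotations`), this spelling genuinely requires the new 3.10 floor. A minimal self-contained sketch of the pattern, with illustrative names not taken from the SDK:

    from __future__ import annotations

    from typing import Literal, TypeAlias  # 3.10+: no typing_extensions fallback needed

    SortDirection: TypeAlias = Literal["asc", "desc"]
    # The right-hand side is evaluated at import time, so the `|` union between
    # str and parameterized tuple[...] is what actually demands Python 3.10+.
    SortSpecLike: TypeAlias = str | tuple[str, SortDirection] | tuple[str, SortDirection, str]

    def normalize(spec: SortSpecLike) -> tuple[str, str, str | None]:
        """Expand the shorthand forms into one canonical triple."""
        if isinstance(spec, str):
            return spec, "asc", None
        if len(spec) == 2:
            return spec[0], spec[1], None
        return spec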
diff --git a/cognite/client/_api/transformations/__init__.py b/cognite/client/_api/transformations/__init__.py index f4f9f82a49..f15ab4b759 100644 --- a/cognite/client/_api/transformations/__init__.py +++ b/cognite/client/_api/transformations/__init__.py @@ -1,7 +1,7 @@ from __future__ import annotations -from collections.abc import Iterator -from typing import TYPE_CHECKING, Any, Literal, Sequence, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Any, Literal, overload from cognite.client._api.transformations.jobs import TransformationJobsAPI from cognite.client._api.transformations.notifications import TransformationNotificationsAPI @@ -440,7 +440,7 @@ def update( Args: item (Transformation | TransformationWrite | TransformationUpdate | Sequence[Transformation | TransformationWrite | TransformationUpdate]): Transformation(s) to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update object is given (Transformation or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (Transformation or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. 
Returns: Transformation | TransformationList: Updated transformation(s) diff --git a/cognite/client/_api/transformations/jobs.py b/cognite/client/_api/transformations/jobs.py index 017bff0fcf..66a52eb73b 100644 --- a/cognite/client/_api/transformations/jobs.py +++ b/cognite/client/_api/transformations/jobs.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Sequence +from collections.abc import Sequence from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ diff --git a/cognite/client/_api/transformations/notifications.py b/cognite/client/_api/transformations/notifications.py index d23e35787b..4c2e1c27a2 100644 --- a/cognite/client/_api/transformations/notifications.py +++ b/cognite/client/_api/transformations/notifications.py @@ -1,7 +1,7 @@ from __future__ import annotations -from collections.abc import Iterator -from typing import Sequence, overload +from collections.abc import Iterator, Sequence +from typing import overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ diff --git a/cognite/client/_api/transformations/schedules.py b/cognite/client/_api/transformations/schedules.py index 2f43f89bd9..cdd5413c96 100644 --- a/cognite/client/_api/transformations/schedules.py +++ b/cognite/client/_api/transformations/schedules.py @@ -1,7 +1,7 @@ from __future__ import annotations -from collections.abc import Iterator -from typing import TYPE_CHECKING, Literal, Sequence, overload +from collections.abc import Iterator, Sequence +from typing import TYPE_CHECKING, Literal, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -258,7 +258,7 @@ def update( Args: item (TransformationSchedule | TransformationScheduleWrite | TransformationScheduleUpdate | Sequence[TransformationSchedule | TransformationScheduleWrite | TransformationScheduleUpdate]): Transformation schedule(s) to update - mode (Literal["replace_ignore_null", "patch", "replace"]): How to update data when a non-update object is given (TransformationSchedule or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. + mode (Literal['replace_ignore_null', 'patch', 'replace']): How to update data when a non-update object is given (TransformationSchedule or -Write). If you use 'replace_ignore_null', only the fields you have set will be used to replace existing (default). Using 'replace' will additionally clear all the fields that are not specified by you. Last option, 'patch', will update only the fields you have set and for container-like fields such as metadata or labels, add the values to the existing. For more details, see :ref:`appendix-update`. 
Returns: TransformationSchedule | TransformationScheduleList: Updated transformation schedule(s) diff --git a/cognite/client/_api/user_profiles.py b/cognite/client/_api/user_profiles.py index bd37f122a1..9247bb788c 100644 --- a/cognite/client/_api/user_profiles.py +++ b/cognite/client/_api/user_profiles.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, cast, overload +from typing import cast, overload from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ @@ -85,7 +85,7 @@ def retrieve(self, user_identifier: str | SequenceNotStr[str]) -> UserProfile | return profiles # TODO: The API does not guarantee any ordering (against style guidelines, no timeline for fix) # so we sort manually for now: - return UserProfileList(cast(List[UserProfile], [profiles.get(user) for user in user_identifier])) + return UserProfileList(cast(list[UserProfile], [profiles.get(user) for user in user_identifier])) def search(self, name: str, limit: int = DEFAULT_LIMIT_READ) -> UserProfileList: """`Search for user profiles `_ diff --git a/cognite/client/_api/workflows.py b/cognite/client/_api/workflows.py index 38f748f38c..1ee724b802 100644 --- a/cognite/client/_api/workflows.py +++ b/cognite/client/_api/workflows.py @@ -1,12 +1,10 @@ from __future__ import annotations import warnings -from collections.abc import Iterator -from typing import TYPE_CHECKING, Any, Literal, MutableSequence, Tuple, Union, overload +from collections.abc import Iterator, MutableSequence +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, overload from urllib.parse import quote -from typing_extensions import TypeAlias - from cognite.client._api_client import APIClient from cognite.client._constants import DEFAULT_LIMIT_READ from cognite.client.data_classes.workflows import ( @@ -42,8 +40,8 @@ from cognite.client import ClientConfig, CogniteClient from cognite.client.data_classes import ClientCredentials -WorkflowIdentifier: TypeAlias = Union[WorkflowVersionId, Tuple[str, str], str] -WorkflowVersionIdentifier: TypeAlias = Union[WorkflowVersionId, Tuple[str, str]] +WorkflowIdentifier: TypeAlias = WorkflowVersionId | tuple[str, str] | str +WorkflowVersionIdentifier: TypeAlias = WorkflowVersionId | tuple[str, str] def wrap_workflow_ids( @@ -232,7 +230,7 @@ def update( Args: task_id (str): The server-generated id of the task. - status (Literal["completed", "failed"]): The new status of the task. Must be either 'completed' or 'failed'. + status (Literal['completed', 'failed']): The new status of the task. Must be either 'completed' or 'failed'. output (dict | None): The output of the task. 
This will be available for tasks that has specified it as an output with the string "${.output}" Returns: diff --git a/cognite/client/_api_client.py b/cognite/client/_api_client.py index c7c438e9f8..550b3f7e98 100644 --- a/cognite/client/_api_client.py +++ b/cognite/client/_api_client.py @@ -7,18 +7,14 @@ import re import warnings from collections import UserList +from collections.abc import Iterator, MutableMapping, Sequence from typing import ( TYPE_CHECKING, Any, ClassVar, - Dict, - Iterator, Literal, - MutableMapping, NoReturn, - Sequence, TypeVar, - Union, cast, overload, ) @@ -55,7 +51,7 @@ split_into_chunks, unpack_items_in_payload, ) -from cognite.client.utils._concurrency import execute_tasks +from cognite.client.utils._concurrency import TaskExecutor, execute_tasks from cognite.client.utils._identifier import ( Identifier, IdentifierCore, @@ -69,8 +65,6 @@ from cognite.client.utils.useful_types import SequenceNotStr if TYPE_CHECKING: - from concurrent.futures import ThreadPoolExecutor - from cognite.client import CogniteClient from cognite.client.config import ClientConfig @@ -328,7 +322,7 @@ def _retrieve_multiple( headers: dict[str, Any] | None = None, other_params: dict[str, Any] | None = None, params: dict[str, Any] | None = None, - executor: ThreadPoolExecutor | None = None, + executor: TaskExecutor | None = None, api_subversion: str | None = None, settings_forcing_raw_response_loading: list[str] | None = None, ) -> T_CogniteResource | None: ... @@ -344,7 +338,7 @@ def _retrieve_multiple( headers: dict[str, Any] | None = None, other_params: dict[str, Any] | None = None, params: dict[str, Any] | None = None, - executor: ThreadPoolExecutor | None = None, + executor: TaskExecutor | None = None, api_subversion: str | None = None, settings_forcing_raw_response_loading: list[str] | None = None, ) -> T_CogniteResourceList: ... @@ -359,7 +353,7 @@ def _retrieve_multiple( headers: dict[str, Any] | None = None, other_params: dict[str, Any] | None = None, params: dict[str, Any] | None = None, - executor: ThreadPoolExecutor | None = None, + executor: TaskExecutor | None = None, api_subversion: str | None = None, settings_forcing_raw_response_loading: list[str] | None = None, ) -> T_CogniteResourceList | T_CogniteResource | None: @@ -866,7 +860,7 @@ def _create_multiple( extra_body_fields: dict[str, Any] | None = None, limit: int | None = None, input_resource_cls: type[CogniteResource] | None = None, - executor: ThreadPoolExecutor | None = None, + executor: TaskExecutor | None = None, api_subversion: str | None = None, ) -> T_CogniteResourceList: ... @@ -882,7 +876,7 @@ def _create_multiple( extra_body_fields: dict[str, Any] | None = None, limit: int | None = None, input_resource_cls: type[CogniteResource] | None = None, - executor: ThreadPoolExecutor | None = None, + executor: TaskExecutor | None = None, api_subversion: str | None = None, ) -> T_WritableCogniteResource: ... 
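Two things happen in _api_client.py. The executor parameters widen from the concrete concurrent.futures.ThreadPoolExecutor to the SDK's own TaskExecutor, whose TYPE_CHECKING-only import is replaced by a runtime import from cognite.client.utils._concurrency. And in the _create_multiple body just below, cast(Union[...], ...) becomes cast(A | B, ...); that change is not purely cosmetic, because cast() takes a runtime expression that the annotations future-import does not defer, so `|` between parameterized generics needs Python 3.10+. A hedged sketch of the cast pattern (the helper name is illustrative, not from the SDK):

    from __future__ import annotations

    from collections.abc import Sequence
    from typing import Any, cast

    def _as_item_sequence(items: dict[str, Any] | Sequence[dict[str, Any]]) -> Sequence[dict[str, Any]]:
        if isinstance(items, dict):
            items = [items]
        # The cast target below is evaluated at runtime: on Python 3.8 it had to
        # be spelled cast(Union[Sequence[...], List[...]], items).
        return cast(Sequence[dict[str, Any]] | list[dict[str, Any]], items)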
@@ -900,7 +894,7 @@ def _create_multiple( extra_body_fields: dict[str, Any] | None = None, limit: int | None = None, input_resource_cls: type[CogniteResource] | None = None, - executor: ThreadPoolExecutor | None = None, + executor: TaskExecutor | None = None, api_subversion: str | None = None, ) -> T_CogniteResourceList | T_WritableCogniteResource: resource_path = resource_path or self._RESOURCE_PATH @@ -908,9 +902,9 @@ def _create_multiple( limit = limit or self._CREATE_LIMIT single_item = not isinstance(items, Sequence) if single_item: - items = cast(Union[Sequence[T_WritableCogniteResource], Sequence[Dict[str, Any]]], [items]) + items = cast(Sequence[T_WritableCogniteResource] | Sequence[dict[str, Any]], [items]) else: - items = cast(Union[Sequence[T_WritableCogniteResource], Sequence[Dict[str, Any]]], items) + items = cast(Sequence[T_WritableCogniteResource] | Sequence[dict[str, Any]], items) items = [item.as_write() if isinstance(item, WriteableCogniteResource) else item for item in items] @@ -965,7 +959,7 @@ def _delete_multiple( headers: dict[str, Any] | None = None, extra_body_fields: dict[str, Any] | None = None, returns_items: bool = False, - executor: ThreadPoolExecutor | None = None, + executor: TaskExecutor | None = None, ) -> list | None: resource_path = resource_path or self._RESOURCE_PATH tasks = [ @@ -1034,9 +1028,9 @@ def _update_multiple( patch_objects = [] single_item = not isinstance(items, (Sequence, UserList)) if single_item: - item_list = cast(Union[Sequence[CogniteResource], Sequence[CogniteUpdate]], [items]) + item_list = cast(Sequence[CogniteResource] | Sequence[CogniteUpdate], [items]) else: - item_list = cast(Union[Sequence[CogniteResource], Sequence[CogniteUpdate]], items) + item_list = cast(Sequence[CogniteResource] | Sequence[CogniteUpdate], items) for index, item in enumerate(item_list): if isinstance(item, CogniteResource): diff --git a/cognite/client/_http_client.py b/cognite/client/_http_client.py index 0b591b0d18..a5846326ab 100644 --- a/cognite/client/_http_client.py +++ b/cognite/client/_http_client.py @@ -4,8 +4,9 @@ import random import socket import time +from collections.abc import Callable, Iterable, MutableMapping from http import cookiejar -from typing import Any, Callable, Iterable, Literal, MutableMapping +from typing import Any, Literal import requests import requests.adapters diff --git a/cognite/client/credentials.py b/cognite/client/credentials.py index a6ee426e44..a3850b86c6 100644 --- a/cognite/client/credentials.py +++ b/cognite/client/credentials.py @@ -7,10 +7,11 @@ import threading import time from abc import abstractmethod +from collections.abc import Callable from datetime import datetime from pathlib import Path from types import MappingProxyType -from typing import Any, Callable, Protocol, runtime_checkable +from typing import Any, Protocol, runtime_checkable from msal import ConfidentialClientApplication, PublicClientApplication, SerializableTokenCache from oauthlib.oauth2 import BackendApplicationClient, OAuth2Error diff --git a/cognite/client/data_classes/_base.py b/cognite/client/data_classes/_base.py index 0e2c7027ea..7f5b57be17 100644 --- a/cognite/client/data_classes/_base.py +++ b/cognite/client/data_classes/_base.py @@ -2,30 +2,26 @@ from abc import ABC, abstractmethod from collections import UserList -from collections.abc import Iterable +from collections.abc import Collection, Iterable, Iterator, Sequence from contextlib import suppress from dataclasses import dataclass from enum import Enum from typing import ( 
TYPE_CHECKING, Any, - Collection, Generic, - Iterator, - List, Literal, Protocol, - Sequence, SupportsIndex, + TypeAlias, TypeVar, - Union, cast, final, overload, runtime_checkable, ) -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.exceptions import CogniteMissingClientError from cognite.client.utils import _json @@ -645,7 +641,7 @@ class Geometry(CogniteObject): """Represents the points, curves and surfaces in the coordinate space. Args: - type (Literal["Point", "MultiPoint", "LineString", "MultiLineString", "Polygon", "MultiPolygon"]): The geometry type. + type (Literal['Point', 'MultiPoint', 'LineString', 'MultiLineString', 'Polygon', 'MultiPolygon']): The geometry type. coordinates (list): An array of the coordinates of the geometry. The structure of the elements in this array is determined by the type of geometry. geometries (Collection[Geometry] | None): No description. @@ -718,7 +714,7 @@ def dump(self, camel_case: bool = True) -> dict[str, Any]: return dumped -SortableProperty: TypeAlias = Union[str, List[str], EnumProperty] +SortableProperty: TypeAlias = str | list[str] | EnumProperty class CogniteSort: diff --git a/cognite/client/data_classes/aggregations.py b/cognite/client/data_classes/aggregations.py index ed5a779cab..c20ea8d8ae 100644 --- a/cognite/client/data_classes/aggregations.py +++ b/cognite/client/data_classes/aggregations.py @@ -2,24 +2,21 @@ from abc import ABC, abstractmethod from collections import UserList -from collections.abc import Collection +from collections.abc import Collection, Iterator, MutableSequence, Sequence from dataclasses import dataclass, field from typing import ( TYPE_CHECKING, Any, ClassVar, - Iterator, - MutableSequence, - Sequence, SupportsIndex, + TypeAlias, TypeVar, - Union, cast, final, overload, ) -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import CogniteObject, CogniteResourceList, UnknownCogniteObject from cognite.client.data_classes.labels import Label @@ -267,7 +264,7 @@ def dump(self, camel_case: bool = True) -> dict[str, Any]: return output -FilterValue: TypeAlias = Union[str, float, bool, Label] +FilterValue: TypeAlias = str | float | bool | Label class AggregationFilter(ABC): diff --git a/cognite/client/data_classes/annotations.py b/cognite/client/data_classes/annotations.py index 264be773d3..61b3024b9f 100644 --- a/cognite/client/data_classes/annotations.py +++ b/cognite/client/data_classes/annotations.py @@ -1,9 +1,7 @@ from __future__ import annotations from abc import ABC -from typing import TYPE_CHECKING, Any, Literal, cast - -from typing_extensions import TypeAlias +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, cast from cognite.client.data_classes._base import ( CogniteFilter, @@ -184,11 +182,11 @@ class AnnotationWrite(AnnotationCore): Args: annotation_type (AnnotationType): The type of the annotation. This uniquely decides what the structure of the 'data' block will be. data (dict): The annotation information. The format of this object is decided by and validated against the 'annotation_type' attribute. - status (Literal["suggested", "approved", "rejected"]): The status of the annotation, e.g. "suggested", "approved", "rejected". + status (Literal['suggested', 'approved', 'rejected']): The status of the annotation, e.g. "suggested", "approved", "rejected". creating_app (str): The name of the app from which this annotation was created. 
creating_app_version (str): The version of the app that created this annotation. Must be a valid semantic versioning (SemVer) string. creating_user (str | None): A username, or email, or name. This is not checked nor enforced. If the value is None, it means the annotation was created by a service. - annotated_resource_type (Literal["file", "threedmodel"]): Type name of the CDF resource that is annotated, e.g. "file". + annotated_resource_type (Literal['file', 'threedmodel']): Type name of the CDF resource that is annotated, e.g. "file". annotated_resource_id (int): The internal ID of the annotated resource. """ diff --git a/cognite/client/data_classes/assets.py b/cognite/client/data_classes/assets.py index 146fa83998..55a410a30e 100644 --- a/cognite/client/data_classes/assets.py +++ b/cognite/client/data_classes/assets.py @@ -8,26 +8,21 @@ import warnings from abc import ABC from collections import Counter, defaultdict +from collections.abc import Sequence from enum import auto from functools import lru_cache +from graphlib import TopologicalSorter from pathlib import Path from typing import ( TYPE_CHECKING, Any, - Dict, - List, Literal, - Optional, - Sequence, TextIO, + TypeAlias, TypeVar, - Union, cast, ) -from graphlib import TopologicalSorter -from typing_extensions import TypeAlias - from cognite.client.data_classes._base import ( CogniteFilter, CogniteLabelUpdate, @@ -890,7 +885,7 @@ def _count_subtree(xid: str, count: int = 0) -> int: counts.sort(key=lambda args: -args[-1]) # The count for the fictitious "root of roots" is just len(assets), so we remove it: (count_dct := dict(counts)).pop(None, None) - return count_dct + return cast(dict[str, int], count_dct) def _on_error(self, on_error: Literal["ignore", "warn", "raise"], message: str) -> None: if on_error == "warn": @@ -943,10 +938,10 @@ def _preconditions_for_cycle_check_are_met(self, on_error: Literal["ignore", "wa def _locate_cycles(self) -> tuple[int, list[list[str]]]: has_cycles = set() no_cycles = {None, *(a.external_id for a in self._roots or [])} - edges = cast(Dict[str, Optional[str]], {a.external_id: a.parent_external_id for a in self._assets}) + edges = cast(dict[str, str | None], {a.external_id: a.parent_external_id for a in self._assets}) if self._ignore_orphans: - no_cycles |= {a.parent_external_id for a in cast(List[Asset], self._orphans)} + no_cycles |= {a.parent_external_id for a in cast(list[Asset], self._orphans)} for xid, parent in edges.items(): if parent in no_cycles: @@ -1086,7 +1081,7 @@ def metadata_key(key: str) -> list[str]: return ["metadata", key] -AssetPropertyLike: TypeAlias = Union[AssetProperty, str, List[str]] +AssetPropertyLike: TypeAlias = AssetProperty | str | list[str] class SortableAssetProperty(EnumProperty): @@ -1104,7 +1099,7 @@ def metadata_key(key: str) -> list[str]: return ["metadata", key] -SortableAssetPropertyLike: TypeAlias = Union[SortableAssetProperty, str, List[str]] +SortableAssetPropertyLike: TypeAlias = SortableAssetProperty | str | list[str] class AssetSort(CogniteSort): diff --git a/cognite/client/data_classes/capabilities.py b/cognite/client/data_classes/capabilities.py index af250630c4..1de5d53dc2 100644 --- a/cognite/client/data_classes/capabilities.py +++ b/cognite/client/data_classes/capabilities.py @@ -7,10 +7,11 @@ import logging import warnings from abc import ABC -from dataclasses import asdict, dataclass, field +from collections.abc import Iterable, Sequence +from dataclasses import InitVar, asdict, dataclass, field from itertools import product from types import 
MappingProxyType -from typing import TYPE_CHECKING, Any, ClassVar, Iterable, Literal, NamedTuple, NoReturn, Sequence, cast +from typing import TYPE_CHECKING, Any, ClassVar, Literal, NamedTuple, NoReturn, cast from typing_extensions import Self @@ -52,15 +53,13 @@ class Capability(ABC): _capability_name: ClassVar[str] actions: Sequence[Action] scope: Scope - # TODO: Python 3.10: Remove 'allow_unknown' for all subclasses by using: - # InitVar = field(default=False, kw_only=True, ...) - allow_unknown: bool + allow_unknown: InitVar[bool] = field(default=False, kw_only=True) - def __post_init__(self) -> None: + def __post_init__(self, allow_unknown: bool) -> None: try: # There are so many things that may fail validation; non-enum passed, not iterable etc. # We always want to show the example usage to the user. - if not self.allow_unknown: + if not allow_unknown: self._validate() except Exception as err: acl_name = "ACL" if (cls := type(self)) is UnknownAcl else cls.__name__ @@ -531,7 +530,6 @@ class UnknownAcl(Capability): capability_name: str = "unknownAcl" raw_data: dict[str, Any] = field(default_factory=dict) - allow_unknown: bool = field(default=False, compare=False, repr=False) @classmethod def show_example_usage(cls) -> Literal[""]: @@ -543,7 +541,6 @@ class AnalyticsAcl(Capability): _capability_name = "analyticsAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -559,7 +556,6 @@ class AnnotationsAcl(Capability): _capability_name = "annotationsAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -576,7 +572,6 @@ class AssetsAcl(Capability): _capability_name = "assetsAcl" actions: Sequence[Action] scope: AllScope | DataSetScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -592,7 +587,6 @@ class DataSetsAcl(Capability): _capability_name = "datasetsAcl" actions: Sequence[Action] scope: AllScope | IDScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -609,7 +603,6 @@ class DigitalTwinAcl(Capability): _capability_name = "digitalTwinAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -624,7 +617,6 @@ class EntityMatchingAcl(Capability): _capability_name = "entitymatchingAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -639,7 +631,6 @@ class EventsAcl(Capability): _capability_name = "eventsAcl" actions: Sequence[Action] scope: AllScope | DataSetScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -655,7 +646,6 @@ class ExtractionPipelinesAcl(Capability): _capability_name = "extractionPipelinesAcl" actions: Sequence[Action] scope: AllScope | IDScope | DataSetScope - allow_unknown: bool = field(default=False, compare=False, 
repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -672,7 +662,6 @@ class ExtractionsRunAcl(Capability): _capability_name = "extractionRunsAcl" actions: Sequence[Action] scope: AllScope | DataSetScope | ExtractionPipelineScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -689,7 +678,6 @@ class ExtractionConfigsAcl(Capability): _capability_name = "extractionConfigsAcl" actions: Sequence[Action] scope: AllScope | DataSetScope | ExtractionPipelineScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -706,7 +694,6 @@ class FilesAcl(Capability): _capability_name = "filesAcl" actions: Sequence[Action] scope: AllScope | DataSetScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -722,7 +709,6 @@ class FunctionsAcl(Capability): _capability_name = "functionsAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -737,7 +723,6 @@ class GeospatialAcl(Capability): _capability_name = "geospatialAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -752,7 +737,6 @@ class GeospatialCrsAcl(Capability): _capability_name = "geospatialCrsAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -767,7 +751,6 @@ class GroupsAcl(Capability): _capability_name = "groupsAcl" actions: Sequence[Action] scope: AllScope | CurrentUserScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Create = "CREATE" @@ -786,7 +769,6 @@ class LabelsAcl(Capability): _capability_name = "labelsAcl" actions: Sequence[Action] scope: AllScope | DataSetScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -802,7 +784,6 @@ class LocationFiltersAcl(Capability): _capability_name = "locationFiltersAcl" actions: Sequence[Action] scope: AllScope | IDScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -818,7 +799,6 @@ class ProjectsAcl(Capability): _capability_name = "projectsAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -836,7 +816,6 @@ class RawAcl(Capability): _capability_name = "rawAcl" actions: Sequence[Action] scope: AllScope | TableScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -853,7 +832,6 @@ class RelationshipsAcl(Capability): _capability_name = "relationshipsAcl" actions: Sequence[Action] scope: AllScope | DataSetScope - allow_unknown: bool = field(default=False, 
compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -869,7 +847,6 @@ class RoboticsAcl(Capability): _capability_name = "roboticsAcl" actions: Sequence[Action] scope: AllScope | DataSetScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -887,7 +864,6 @@ class SecurityCategoriesAcl(Capability): _capability_name = "securityCategoriesAcl" actions: Sequence[Action] scope: AllScope | IDScopeLowerCase - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] MemberOf = "MEMBEROF" @@ -906,7 +882,6 @@ class SeismicAcl(Capability): _capability_name = "seismicAcl" actions: Sequence[Action] scope: AllScope | PartitionScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -922,7 +897,6 @@ class SequencesAcl(Capability): _capability_name = "sequencesAcl" actions: Sequence[Action] scope: AllScope | DataSetScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -938,7 +912,6 @@ class SessionsAcl(Capability): _capability_name = "sessionsAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] List = "LIST" @@ -954,7 +927,6 @@ class ThreeDAcl(Capability): _capability_name = "threedAcl" actions: Sequence[Action] scope: AllScope | DataSetScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -972,7 +944,6 @@ class TimeSeriesAcl(Capability): _capability_name = "timeSeriesAcl" actions: Sequence[Action] scope: AllScope | DataSetScope | IDScopeLowerCase | AssetRootIDScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -990,7 +961,6 @@ class TimeSeriesSubscriptionsAcl(Capability): _capability_name = "timeSeriesSubscriptionsAcl" actions: Sequence[Action] scope: AllScope | DataSetScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1006,7 +976,6 @@ class TransformationsAcl(Capability): _capability_name = "transformationsAcl" actions: Sequence[Action] scope: AllScope | DataSetScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1022,7 +991,6 @@ class TypesAcl(Capability): _capability_name = "typesAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1037,7 +1005,6 @@ class WellsAcl(Capability): _capability_name = "wellsAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1052,7 +1019,6 @@ class ExperimentsAcl(Capability): _capability_name = "experimentAcl" actions: Sequence[Action] scope: ExperimentsScope - allow_unknown: 
bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Use = "USE" @@ -1073,7 +1039,6 @@ class TemplateGroupsAcl(Capability): _capability_name = "templateGroupsAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1089,7 +1054,6 @@ class TemplateInstancesAcl(Capability): _capability_name = "templateInstancesAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1105,7 +1069,6 @@ class DataModelInstancesAcl(Capability): _capability_name = "dataModelInstancesAcl" actions: Sequence[Action] scope: AllScope | SpaceIDScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1123,7 +1086,6 @@ class DataModelsAcl(Capability): _capability_name = "dataModelsAcl" actions: Sequence[Action] scope: AllScope | SpaceIDScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1140,7 +1102,6 @@ class PipelinesAcl(Capability): _capability_name = "pipelinesAcl" actions: Sequence[Action] scope: AllScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1155,7 +1116,6 @@ class DocumentPipelinesAcl(Capability): _capability_name = "documentPipelinesAcl" actions: Sequence[Action] scope: AllScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1170,7 +1130,6 @@ class FilePipelinesAcl(Capability): _capability_name = "filePipelinesAcl" actions: Sequence[Action] scope: AllScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1185,7 +1144,6 @@ class NotificationsAcl(Capability): _capability_name = "notificationsAcl" actions: Sequence[Action] scope: AllScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1200,7 +1158,6 @@ class ScheduledCalculationsAcl(Capability): _capability_name = "scheduledCalculationsAcl" actions: Sequence[Action] scope: AllScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1215,7 +1172,6 @@ class MonitoringTasksAcl(Capability): _capability_name = "monitoringTasksAcl" actions: Sequence[Action] scope: AllScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1230,7 +1186,6 @@ class HostedExtractorsAcl(Capability): _capability_name = "hostedExtractorsAcl" actions: Sequence[Action] scope: AllScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1245,7 +1200,6 @@ class VisionModelAcl(Capability): _capability_name = "visionModelAcl" actions: Sequence[Action] scope: AllScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class 
Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1260,7 +1214,6 @@ class DocumentFeedbackAcl(Capability): _capability_name = "documentFeedbackAcl" actions: Sequence[Action] scope: AllScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Create = "CREATE" @@ -1276,7 +1229,6 @@ class WorkflowOrchestrationAcl(Capability): _capability_name = "workflowOrchestrationAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1291,7 +1243,6 @@ class PostgresGatewayAcl(Capability): _capability_name = "postgresGatewayAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1306,7 +1257,6 @@ class UserProfilesAcl(Capability): _capability_name = "userProfilesAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1320,7 +1270,6 @@ class AuditlogAcl(Capability): _capability_name = "auditlogAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1334,7 +1283,6 @@ class LegacyModelHostingAcl(LegacyCapability): _capability_name = "modelHostingAcl" actions: Sequence[Action] scope: AllScope = field(default_factory=AllScope) - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" @@ -1349,7 +1297,6 @@ class LegacyGenericsAcl(LegacyCapability): _capability_name = "genericsAcl" actions: Sequence[Action] scope: AllScope - allow_unknown: bool = field(default=False, compare=False, repr=False) class Action(Capability.Action): # type: ignore [misc] Read = "READ" diff --git a/cognite/client/data_classes/contextualization.py b/cognite/client/data_classes/contextualization.py index ad056330c4..b24402b897 100644 --- a/cognite/client/data_classes/contextualization.py +++ b/cognite/client/data_classes/contextualization.py @@ -2,9 +2,10 @@ import time import warnings +from collections.abc import Sequence from dataclasses import dataclass from enum import Enum -from typing import TYPE_CHECKING, Any, Sequence, Type, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from requests.utils import CaseInsensitiveDict from typing_extensions import Self @@ -552,7 +553,7 @@ def convert(self) -> DiagramConvertResults: # Vision dataclasses -FeatureClass = Union[Type[TextRegion], Type[AssetLink], Type[ObjectDetection]] +FeatureClass = type[TextRegion] | type[AssetLink] | type[ObjectDetection] class VisionFeature(str, Enum): diff --git a/cognite/client/data_classes/data_modeling/cdm/v1.py b/cognite/client/data_classes/data_modeling/cdm/v1.py index e652bab6de..a7b455174e 100644 --- a/cognite/client/data_classes/data_modeling/cdm/v1.py +++ b/cognite/client/data_classes/data_modeling/cdm/v1.py @@ -241,9 +241,9 @@ class Cognite360ImageCollectionApply(_Cognite360ImageCollectionProperties, Typed description (str | None): Description of the instance tags (list[str] | None): Text 
based labels for generic use, limited to 1000 aliases (list[str] | None): Alternative names for the node - status (Literal["Done", "Failed", "Processing", "Queued"] | None): The status field. + status (Literal['Done', 'Failed', 'Processing', 'Queued'] | None): The status field. published (bool | None): The published field. - revision_type (Literal["CAD", "Image360", "PointCloud"] | None): The revision type field. + revision_type (Literal['CAD', 'Image360', 'PointCloud'] | None): The revision type field. model_3d (DirectRelationReference | tuple[str, str] | None): The model 3d field. existing_version (int | None): Fail the ingestion request if the node's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the node (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. type (DirectRelationReference | tuple[str, str] | None): Direct relation pointing to the type node. @@ -292,9 +292,9 @@ class Cognite360ImageCollection(_Cognite360ImageCollectionProperties, TypedNode) description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 aliases (list[str] | None): Alternative names for the node - status (Literal["Done", "Failed", "Processing", "Queued"] | None): The status field. + status (Literal['Done', 'Failed', 'Processing', 'Queued'] | None): The status field. published (bool | None): The published field. - revision_type (Literal["CAD", "Image360", "PointCloud"] | None): The revision type field. + revision_type (Literal['CAD', 'Image360', 'PointCloud'] | None): The revision type field. model_3d (DirectRelationReference | None): The model 3d field. type (DirectRelationReference | None): Direct relation pointing to the type node. deleted_time (int | None): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances are filtered out of query results, but present in sync results @@ -367,7 +367,7 @@ class Cognite360ImageModelApply(_Cognite360ImageModelProperties, TypedNodeApply) description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 aliases (list[str] | None): Alternative names for the node - model_type (Literal["CAD", "Image360", "PointCloud"] | None): CAD, PointCloud or Image360 + model_type (Literal['CAD', 'Image360', 'PointCloud'] | None): CAD, PointCloud or Image360 thumbnail (DirectRelationReference | tuple[str, str] | None): Thumbnail of the 3D model existing_version (int | None): Fail the ingestion request if the node's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the node (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. type (DirectRelationReference | tuple[str, str] | None): Direct relation pointing to the type node. 
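Backing up to capabilities.py: that is the one file where the 3.10 floor removes boilerplate rather than just renaming imports. The removed TODO said it outright: with the kw_only field option added to dataclasses in Python 3.10, the per-subclass `allow_unknown: bool = field(default=False, compare=False, repr=False)` line collapses into a single InitVar on the Capability base class, consumed by __post_init__ and never stored. A standalone sketch of the mechanism, with hypothetical class names:

    from __future__ import annotations

    from dataclasses import InitVar, dataclass, field

    @dataclass
    class Base:
        name: str
        # kw_only=True (3.10+) keeps this defaulted pseudo-field on the base class
        # without breaking field order in subclasses that add non-default fields.
        # As an InitVar it is handed to __post_init__ and never stored, so it also
        # drops out of repr and comparisons for free.
        allow_unknown: InitVar[bool] = field(default=False, kw_only=True)

        def __post_init__(self, allow_unknown: bool) -> None:
            if not allow_unknown:
                self._validate()

        def _validate(self) -> None:
            if not self.name:
                raise ValueError("name must be non-empty")

    @dataclass
    class Child(Base):
        level: int = 0  # no allow_unknown re-declaration needed, unlike on 3.8

    Child(name="", allow_unknown=True)  # validation skipped
    Child(name="x")                     # validated and passes; name="" would raise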
@@ -412,7 +412,7 @@ class Cognite360ImageModel(_Cognite360ImageModelProperties, TypedNode): description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 aliases (list[str] | None): Alternative names for the node - model_type (Literal["CAD", "Image360", "PointCloud"] | None): CAD, PointCloud or Image360 + model_type (Literal['CAD', 'Image360', 'PointCloud'] | None): CAD, PointCloud or Image360 thumbnail (DirectRelationReference | None): Thumbnail of the 3D model type (DirectRelationReference | None): Direct relation pointing to the type node. deleted_time (int | None): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances are filtered out of query results, but present in sync results @@ -584,7 +584,7 @@ class Cognite3DModelApply(_Cognite3DModelProperties, TypedNodeApply): description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 aliases (list[str] | None): Alternative names for the node - model_type (Literal["CAD", "Image360", "PointCloud"] | None): CAD, PointCloud or Image360 + model_type (Literal['CAD', 'Image360', 'PointCloud'] | None): CAD, PointCloud or Image360 thumbnail (DirectRelationReference | tuple[str, str] | None): Thumbnail of the 3D model existing_version (int | None): Fail the ingestion request if the node's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the node (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. type (DirectRelationReference | tuple[str, str] | None): Direct relation pointing to the type node. @@ -629,7 +629,7 @@ class Cognite3DModel(_Cognite3DModelProperties, TypedNode): description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 aliases (list[str] | None): Alternative names for the node - model_type (Literal["CAD", "Image360", "PointCloud"] | None): CAD, PointCloud or Image360 + model_type (Literal['CAD', 'Image360', 'PointCloud'] | None): CAD, PointCloud or Image360 thumbnail (DirectRelationReference | None): Thumbnail of the 3D model type (DirectRelationReference | None): Direct relation pointing to the type node. deleted_time (int | None): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances are filtered out of query results, but present in sync results @@ -839,9 +839,9 @@ class Cognite3DRevisionApply(_Cognite3DRevisionProperties, TypedNodeApply): Args: space (str): The space where the node is located. external_id (str): The external id of the Cognite 3D revision. - status (Literal["Done", "Failed", "Processing", "Queued"] | None): The status field. + status (Literal['Done', 'Failed', 'Processing', 'Queued'] | None): The status field. published (bool | None): The published field. - revision_type (Literal["CAD", "Image360", "PointCloud"] | None): The revision type field. 
+ revision_type (Literal['CAD', 'Image360', 'PointCloud'] | None): The revision type field. model_3d (DirectRelationReference | tuple[str, str] | None): The model 3d field. existing_version (int | None): Fail the ingestion request if the node's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the node (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. type (DirectRelationReference | tuple[str, str] | None): Direct relation pointing to the type node. @@ -879,9 +879,9 @@ class Cognite3DRevision(_Cognite3DRevisionProperties, TypedNode): version (int): DMS version. last_updated_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. - status (Literal["Done", "Failed", "Processing", "Queued"] | None): The status field. + status (Literal['Done', 'Failed', 'Processing', 'Queued'] | None): The status field. published (bool | None): The published field. - revision_type (Literal["CAD", "Image360", "PointCloud"] | None): The revision type field. + revision_type (Literal['CAD', 'Image360', 'PointCloud'] | None): The revision type field. model_3d (DirectRelationReference | None): The model 3d field. type (DirectRelationReference | None): Direct relation pointing to the type node. deleted_time (int | None): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances are filtered out of query results, but present in sync results @@ -1717,7 +1717,7 @@ class CogniteCADModelApply(_CogniteCADModelProperties, TypedNodeApply): description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 aliases (list[str] | None): Alternative names for the node - model_type (Literal["CAD", "Image360", "PointCloud"] | None): CAD, PointCloud or Image360 + model_type (Literal['CAD', 'Image360', 'PointCloud'] | None): CAD, PointCloud or Image360 thumbnail (DirectRelationReference | tuple[str, str] | None): Thumbnail of the 3D model existing_version (int | None): Fail the ingestion request if the node's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the node (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. type (DirectRelationReference | tuple[str, str] | None): Direct relation pointing to the type node. 
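The existing_version contract repeated in these docstrings is easiest to see in code. A minimal sketch, assuming the cdm.v1 import path for the typed classes and a client configuration set up elsewhere; the space and external id values are placeholders:

    from cognite.client import CogniteClient
    from cognite.client.data_classes.data_modeling.cdm.v1 import Cognite3DRevisionApply

    client = CogniteClient()  # assumes credentials/config are already set up

    # existing_version=None (the default) always overwrites existing data;
    # existing_version=0 behaves as an insert and fails (or is skipped, when
    # skipOnVersionConflict is set on the request) if the node already exists.
    revision = Cognite3DRevisionApply(
        space="my-space",
        external_id="rev-001",
        status="Queued",
        revision_type="PointCloud",
        existing_version=0,
    )
    client.data_modeling.instances.apply(revision)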
@@ -1762,7 +1762,7 @@ class CogniteCADModel(_CogniteCADModelProperties, TypedNode): description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 aliases (list[str] | None): Alternative names for the node - model_type (Literal["CAD", "Image360", "PointCloud"] | None): CAD, PointCloud or Image360 + model_type (Literal['CAD', 'Image360', 'PointCloud'] | None): CAD, PointCloud or Image360 thumbnail (DirectRelationReference | None): Thumbnail of the 3D model type (DirectRelationReference | None): Direct relation pointing to the type node. deleted_time (int | None): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances are filtered out of query results, but present in sync results @@ -1970,9 +1970,9 @@ class CogniteCADRevisionApply(_CogniteCADRevisionProperties, TypedNodeApply): Args: space (str): The space where the node is located. external_id (str): The external id of the Cognite cad revision. - status (Literal["Done", "Failed", "Processing", "Queued"] | None): The status field. + status (Literal['Done', 'Failed', 'Processing', 'Queued'] | None): The status field. published (bool | None): The published field. - revision_type (Literal["CAD", "Image360", "PointCloud"] | None): The revision type field. + revision_type (Literal['CAD', 'Image360', 'PointCloud'] | None): The revision type field. model_3d (DirectRelationReference | tuple[str, str] | None): . revision_id (int | None): The 3D API revision identifier for this CAD model existing_version (int | None): Fail the ingestion request if the node's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the node (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. @@ -2011,9 +2011,9 @@ class CogniteCADRevision(_CogniteCADRevisionProperties, TypedNode): version (int): DMS version. last_updated_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. - status (Literal["Done", "Failed", "Processing", "Queued"] | None): The status field. + status (Literal['Done', 'Failed', 'Processing', 'Queued'] | None): The status field. published (bool | None): The published field. - revision_type (Literal["CAD", "Image360", "PointCloud"] | None): The revision type field. + revision_type (Literal['CAD', 'Image360', 'PointCloud'] | None): The revision type field. model_3d (DirectRelationReference | None): . revision_id (int | None): The 3D API revision identifier for this CAD model type (DirectRelationReference | None): Direct relation pointing to the type node. 
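A related pattern in these hunks: the *Apply classes accept either a DirectRelationReference or a plain (space, external_id) tuple for direct relation fields such as model_3d and type, while the read classes always carry the full object. The two equivalent write-side spellings, with placeholder values:

    from cognite.client.data_classes.data_modeling import DirectRelationReference

    as_tuple = ("my-space", "model-123")
    as_reference = DirectRelationReference(space="my-space", external_id="model-123")
    # Both are accepted by fields typed DirectRelationReference | tuple[str, str] | None.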
@@ -2912,7 +2912,7 @@ class CognitePointCloudModelApply(_CognitePointCloudModelProperties, TypedNodeAp description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 aliases (list[str] | None): Alternative names for the node - model_type (Literal["CAD", "Image360", "PointCloud"] | None): CAD, PointCloud or Image360 + model_type (Literal['CAD', 'Image360', 'PointCloud'] | None): CAD, PointCloud or Image360 thumbnail (DirectRelationReference | tuple[str, str] | None): Thumbnail of the 3D model existing_version (int | None): Fail the ingestion request if the node's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the node (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. type (DirectRelationReference | tuple[str, str] | None): Direct relation pointing to the type node. @@ -2957,7 +2957,7 @@ class CognitePointCloudModel(_CognitePointCloudModelProperties, TypedNode): description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 aliases (list[str] | None): Alternative names for the node - model_type (Literal["CAD", "Image360", "PointCloud"] | None): CAD, PointCloud or Image360 + model_type (Literal['CAD', 'Image360', 'PointCloud'] | None): CAD, PointCloud or Image360 thumbnail (DirectRelationReference | None): Thumbnail of the 3D model type (DirectRelationReference | None): Direct relation pointing to the type node. deleted_time (int | None): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances are filtered out of query results, but present in sync results @@ -3022,9 +3022,9 @@ class CognitePointCloudRevisionApply(_CognitePointCloudRevisionProperties, Typed Args: space (str): The space where the node is located. external_id (str): The external id of the Cognite point cloud revision. - status (Literal["Done", "Failed", "Processing", "Queued"] | None): The status field. + status (Literal['Done', 'Failed', 'Processing', 'Queued'] | None): The status field. published (bool | None): The published field. - revision_type (Literal["CAD", "Image360", "PointCloud"] | None): The revision type field. + revision_type (Literal['CAD', 'Image360', 'PointCloud'] | None): The revision type field. model_3d (DirectRelationReference | tuple[str, str] | None): . revision_id (int | None): The 3D API revision identifier for this PointCloud model existing_version (int | None): Fail the ingestion request if the node's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the node (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. @@ -3064,9 +3064,9 @@ class CognitePointCloudRevision(_CognitePointCloudRevisionProperties, TypedNode) version (int): DMS version. 
last_updated_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. - status (Literal["Done", "Failed", "Processing", "Queued"] | None): The status field. + status (Literal['Done', 'Failed', 'Processing', 'Queued'] | None): The status field. published (bool | None): The published field. - revision_type (Literal["CAD", "Image360", "PointCloud"] | None): The revision type field. + revision_type (Literal['CAD', 'Image360', 'PointCloud'] | None): The revision type field. model_3d (DirectRelationReference | None): . revision_id (int | None): The 3D API revision identifier for this PointCloud model type (DirectRelationReference | None): Direct relation pointing to the type node. @@ -3139,7 +3139,7 @@ class CognitePointCloudVolumeApply(_CognitePointCloudVolumeProperties, TypedNode model_3d (DirectRelationReference | tuple[str, str] | None): Direct relation to Cognite3DModel instance volume_references (list[str] | None): Unique volume metric hashes used to access the 3D specialized data storage revisions (list[DirectRelationReference | tuple[str, str]] | None): List of direct relations to revision information - volume_type (Literal["Box", "Cylinder"] | None): Type of volume (Cylinder or Box) + volume_type (Literal['Box', 'Cylinder'] | None): Type of volume (Cylinder or Box) volume (list[float] | None): Relevant coordinates for the volume type, 9 floats in total, that defines the volume format_version (str | None): Specifies the version the 'volume' field is following. Volume definition is today 9 floats (property volume) existing_version (int | None): Fail the ingestion request if the node's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the node (for the specified container or node). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. @@ -3199,7 +3199,7 @@ class CognitePointCloudVolume(_CognitePointCloudVolumeProperties, TypedNode): model_3d (DirectRelationReference | None): Direct relation to Cognite3DModel instance volume_references (list[str] | None): Unique volume metric hashes used to access the 3D specialized data storage revisions (list[DirectRelationReference] | None): List of direct relations to revision information - volume_type (Literal["Box", "Cylinder"] | None): Type of volume (Cylinder or Box) + volume_type (Literal['Box', 'Cylinder'] | None): Type of volume (Cylinder or Box) volume (list[float] | None): Relevant coordinates for the volume type, 9 floats in total, that defines the volume format_version (str | None): Specifies the version the 'volume' field is following. Volume definition is today 9 floats (property volume) type (DirectRelationReference | None): Direct relation pointing to the type node. @@ -3622,7 +3622,7 @@ class CogniteTimeSeriesApply(_CogniteTimeSeriesProperties, TypedNodeApply): space (str): The space where the node is located. external_id (str): The external id of the Cognite time series. is_step (bool): Specifies whether the time series is a step time series or not. 
- time_series_type (Literal["numeric", "string"]): Specifies the data type of the data points. + time_series_type (Literal['numeric', 'string']): Specifies the data type of the data points. name (str | None): Name of the instance description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 @@ -3700,7 +3700,7 @@ class CogniteTimeSeries(_CogniteTimeSeriesProperties, TypedNode): last_updated_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. is_step (bool): Specifies whether the time series is a step time series or not. - time_series_type (Literal["numeric", "string"]): Specifies the data type of the data points. + time_series_type (Literal['numeric', 'string']): Specifies the data type of the data points. name (str | None): Name of the instance description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 @@ -4033,7 +4033,7 @@ class Cognite360ImageAnnotationApply(_Cognite360ImageAnnotationProperties, Typed source_created_user (str | None): User identifier from the source system on who created the source data. This identifier is not guaranteed to match the user identifiers in CDF source_updated_user (str | None): User identifier from the source system on who last updated the source data. This identifier is not guaranteed to match the user identifiers in CDF confidence (float | None): The confidence that the annotation is a good match - status (Literal["Approved", "Rejected", "Suggested"] | None): The status of the annotation + status (Literal['Approved', 'Rejected', 'Suggested'] | None): The status of the annotation polygon (list[float] | None): List of floats representing the polygon. Format depends on formatVersion format_version (str | None): Specifies the storage representation for the polygon existing_version (int | None): Fail the ingestion request if the edge's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the edge (for the specified container or edge). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. @@ -4108,7 +4108,7 @@ class Cognite360ImageAnnotation(_Cognite360ImageAnnotationProperties, TypedEdge) source_created_user (str | None): User identifier from the source system on who created the source data. This identifier is not guaranteed to match the user identifiers in CDF source_updated_user (str | None): User identifier from the source system on who last updated the source data. This identifier is not guaranteed to match the user identifiers in CDF confidence (float | None): The confidence that the annotation is a good match - status (Literal["Approved", "Rejected", "Suggested"] | None): The status of the annotation + status (Literal['Approved', 'Rejected', 'Suggested'] | None): The status of the annotation polygon (list[float] | None): List of floats representing the polygon. 
Format depends on formatVersion format_version (str | None): Specifies the storage representation for the polygon deleted_time (int | None): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances are filtered out of query results, but present in sync results @@ -4363,7 +4363,7 @@ class CogniteAnnotationApply(_CogniteAnnotationProperties, TypedEdgeApply): source_created_user (str | None): User identifier from the source system on who created the source data. This identifier is not guaranteed to match the user identifiers in CDF source_updated_user (str | None): User identifier from the source system on who last updated the source data. This identifier is not guaranteed to match the user identifiers in CDF confidence (float | None): The confidence that the annotation is a good match - status (Literal["Approved", "Rejected", "Suggested"] | None): The status of the annotation + status (Literal['Approved', 'Rejected', 'Suggested'] | None): The status of the annotation existing_version (int | None): Fail the ingestion request if the edge's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the edge (for the specified container or edge). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the item already exists. If skipOnVersionConflict is set on the ingestion request, then the item will be skipped instead of failing the ingestion request. """ @@ -4433,7 +4433,7 @@ class CogniteAnnotation(_CogniteAnnotationProperties, TypedEdge): source_created_user (str | None): User identifier from the source system on who created the source data. This identifier is not guaranteed to match the user identifiers in CDF source_updated_user (str | None): User identifier from the source system on who last updated the source data. This identifier is not guaranteed to match the user identifiers in CDF confidence (float | None): The confidence that the annotation is a good match - status (Literal["Approved", "Rejected", "Suggested"] | None): The status of the annotation + status (Literal['Approved', 'Rejected', 'Suggested'] | None): The status of the annotation deleted_time (int | None): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. Timestamp when the instance was soft deleted. Note that deleted instances are filtered out of query results, but present in sync results """ @@ -4657,7 +4657,7 @@ class CogniteDiagramAnnotationApply(_CogniteDiagramAnnotationProperties, TypedEd source_created_user (str | None): User identifier from the source system on who created the source data. This identifier is not guaranteed to match the user identifiers in CDF source_updated_user (str | None): User identifier from the source system on who last updated the source data. This identifier is not guaranteed to match the user identifiers in CDF confidence (float | None): The confidence that the annotation is a good match - status (Literal["Approved", "Rejected", "Suggested"] | None): The status of the annotation + status (Literal['Approved', 'Rejected', 'Suggested'] | None): The status of the annotation start_node_page_number (int | None): The number of the page on which this annotation is located in `startNode` File. 
The first page has number 1 end_node_page_number (int | None): The number of the page on which this annotation is located in the endNode File if an endNode is present. The first page has number 1 start_node_x_min (float | None): Value between [0,1]. Minimum abscissa of the bounding box (left edge). Must be strictly less than startNodeXMax @@ -4763,7 +4763,7 @@ class CogniteDiagramAnnotation(_CogniteDiagramAnnotationProperties, TypedEdge): source_created_user (str | None): User identifier from the source system on who created the source data. This identifier is not guaranteed to match the user identifiers in CDF source_updated_user (str | None): User identifier from the source system on who last updated the source data. This identifier is not guaranteed to match the user identifiers in CDF confidence (float | None): The confidence that the annotation is a good match - status (Literal["Approved", "Rejected", "Suggested"] | None): The status of the annotation + status (Literal['Approved', 'Rejected', 'Suggested'] | None): The status of the annotation start_node_page_number (int | None): The number of the page on which this annotation is located in `startNode` File. The first page has number 1 end_node_page_number (int | None): The number of the page on which this annotation is located in the endNode File if an endNode is present. The first page has number 1 start_node_x_min (float | None): Value between [0,1]. Minimum abscissa of the bounding box (left edge). Must be strictly less than startNodeXMax diff --git a/cognite/client/data_classes/data_modeling/containers.py b/cognite/client/data_classes/data_modeling/containers.py index a4c9ba977e..60135ce5fd 100644 --- a/cognite/client/data_classes/data_modeling/containers.py +++ b/cognite/client/data_classes/data_modeling/containers.py @@ -81,7 +81,7 @@ class ContainerApply(ContainerCore): properties (dict[str, ContainerProperty]): We index the property by a local unique identifier. description (str | None): Textual description of the container name (str | None): Human readable name for the container. - used_for (Literal["node", "edge", "all"] | None): Should this operation apply to nodes, edges or both. + used_for (Literal['node', 'edge', 'all'] | None): Should this operation apply to nodes, edges or both. constraints (dict[str, Constraint] | None): Set of constraints to apply to the container indexes (dict[str, Index] | None): Set of indexes to apply to the container. """ @@ -133,7 +133,7 @@ class Container(ContainerCore): created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. description (str | None): Textual description of the container name (str | None): Human readable name for the container. - used_for (Literal["node", "edge", "all"]): Should this operation apply to nodes, edges or both. + used_for (Literal['node', 'edge', 'all']): Should this operation apply to nodes, edges or both. constraints (dict[str, Constraint] | None): Set of constraints to apply to the container indexes (dict[str, Index] | None): Set of indexes to apply to the container. 
""" diff --git a/cognite/client/data_classes/data_modeling/data_models.py b/cognite/client/data_classes/data_modeling/data_models.py index bd5ba3f835..9686c000e0 100644 --- a/cognite/client/data_classes/data_modeling/data_models.py +++ b/cognite/client/data_classes/data_modeling/data_models.py @@ -1,8 +1,9 @@ from __future__ import annotations from abc import ABC +from collections.abc import Sequence from operator import attrgetter -from typing import TYPE_CHECKING, Any, Generic, Literal, Sequence, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, Generic, Literal, TypeVar, cast from typing_extensions import Self @@ -102,7 +103,7 @@ def as_write(self) -> DataModelApply: return self -T_View = TypeVar("T_View", bound=Union[ViewId, View]) +T_View = TypeVar("T_View", bound=ViewId | View) class DataModel(DataModelCore, Generic[T_View]): @@ -221,7 +222,7 @@ def latest_version(self, key: Literal["created_time", "last_updated_time"] = "cr created_time or last_updated_time field. Args: - key (Literal["created_time", "last_updated_time"]): The field to use for determining the latest version. + key (Literal['created_time', 'last_updated_time']): The field to use for determining the latest version. Returns: DataModel[T_View]: The data model with the latest version. diff --git a/cognite/client/data_classes/data_modeling/data_types.py b/cognite/client/data_classes/data_modeling/data_types.py index 53e91e7468..26c4a59d45 100644 --- a/cognite/client/data_classes/data_modeling/data_types.py +++ b/cognite/client/data_classes/data_modeling/data_types.py @@ -3,9 +3,9 @@ import logging from abc import ABC from dataclasses import asdict, dataclass -from typing import TYPE_CHECKING, Any, ClassVar, cast +from typing import TYPE_CHECKING, Any, ClassVar, TypeAlias, cast -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import CogniteObject, UnknownCogniteObject from cognite.client.data_classes.data_modeling.ids import ContainerId diff --git a/cognite/client/data_classes/data_modeling/extractor_extensions/v1.py b/cognite/client/data_classes/data_modeling/extractor_extensions/v1.py index e1a2d95834..3af7ef78aa 100644 --- a/cognite/client/data_classes/data_modeling/extractor_extensions/v1.py +++ b/cognite/client/data_classes/data_modeling/extractor_extensions/v1.py @@ -316,7 +316,7 @@ class CogniteExtractorTimeSeriesApply(_CogniteExtractorTimeSeriesProperties, Typ space (str): The space where the node is located. external_id (str): The external id of the Cognite extractor time series. is_step (bool): Defines whether the time series is a step series or not. - time_series_type (Literal["numeric", "string"]): Defines data type of the data points. + time_series_type (Literal['numeric', 'string']): Defines data type of the data points. name (str | None): Name of the instance description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 @@ -396,7 +396,7 @@ class CogniteExtractorTimeSeries(_CogniteExtractorTimeSeriesProperties, TypedNod last_updated_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. is_step (bool): Defines whether the time series is a step series or not. 
- time_series_type (Literal["numeric", "string"]): Defines data type of the data points. + time_series_type (Literal['numeric', 'string']): Defines data type of the data points. name (str | None): Name of the instance description (str | None): Description of the instance tags (list[str] | None): Text based labels for generic use, limited to 1000 diff --git a/cognite/client/data_classes/data_modeling/ids.py b/cognite/client/data_classes/data_modeling/ids.py index 8bb97e19c3..5eda8c6252 100644 --- a/cognite/client/data_classes/data_modeling/ids.py +++ b/cognite/client/data_classes/data_modeling/ids.py @@ -1,8 +1,9 @@ from __future__ import annotations from abc import ABC +from collections.abc import Sequence from dataclasses import asdict, dataclass, field -from typing import TYPE_CHECKING, Any, ClassVar, Literal, Protocol, Sequence, Tuple, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, ClassVar, Literal, Protocol, TypeVar, cast from typing_extensions import Self @@ -178,15 +179,15 @@ class VersionedIdLike(IdLike, Protocol): def version(self) -> str | None: ... -ContainerIdentifier = Union[ContainerId, Tuple[str, str]] -ConstraintIdentifier = Tuple[ContainerId, str] -IndexIdentifier = Tuple[ContainerId, str] -ViewIdentifier = Union[ViewId, Tuple[str, str], Tuple[str, str, str]] -DataModelIdentifier = Union[DataModelId, Tuple[str, str], Tuple[str, str, str]] -NodeIdentifier = Union[NodeId, Tuple[str, str, str]] -EdgeIdentifier = Union[EdgeId, Tuple[str, str, str]] +ContainerIdentifier = ContainerId | tuple[str, str] +ConstraintIdentifier = tuple[ContainerId, str] +IndexIdentifier = tuple[ContainerId, str] +ViewIdentifier = ViewId | tuple[str, str] | tuple[str, str, str] +DataModelIdentifier = DataModelId | tuple[str, str] | tuple[str, str, str] +NodeIdentifier = NodeId | tuple[str, str, str] +EdgeIdentifier = EdgeId | tuple[str, str, str] -Id = Union[Tuple[str, str], Tuple[str, str, str], IdLike, VersionedIdLike] +Id = tuple[str, str] | tuple[str, str, str] | IdLike | VersionedIdLike def _load_space_identifier(ids: str | SequenceNotStr[str]) -> DataModelingIdentifierSequence: diff --git a/cognite/client/data_classes/data_modeling/instances.py b/cognite/client/data_classes/data_modeling/instances.py index 1edffcaee8..0e30e30c17 100644 --- a/cognite/client/data_classes/data_modeling/instances.py +++ b/cognite/client/data_classes/data_modeling/instances.py @@ -5,7 +5,17 @@ import warnings from abc import ABC, abstractmethod from collections import UserDict, defaultdict -from collections.abc import Iterable +from collections.abc import ( + Collection, + ItemsView, + Iterable, + Iterator, + KeysView, + Mapping, + MutableMapping, + Sequence, + ValuesView, +) from dataclasses import dataclass from datetime import date, datetime from functools import lru_cache @@ -13,26 +23,16 @@ from typing import ( TYPE_CHECKING, Any, - Collection, - Dict, Generic, - ItemsView, - Iterator, - KeysView, - List, Literal, - Mapping, - MutableMapping, NoReturn, - Sequence, + TypeAlias, TypeVar, - Union, - ValuesView, cast, overload, ) -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import ( CogniteObject, @@ -73,38 +73,29 @@ from cognite.client import CogniteClient -PropertyValue: TypeAlias = Union[ - str, - int, - float, - bool, - dict, - List[str], - List[int], - List[float], - List[bool], - List[dict], -] -PropertyValueWrite: TypeAlias = Union[ - str, - int, - float, - bool, - dict, - SequenceNotStr[str], - Sequence[int], - 
Sequence[float], - Sequence[bool], - Sequence[dict], - NodeId, - DirectRelationReference, - date, - datetime, - Sequence[Union[NodeId, DirectRelationReference]], - Sequence[date], - Sequence[datetime], - None, -] +PropertyValue: TypeAlias = ( + str | int | float | bool | dict | list[str] | list[int] | list[float] | list[bool] | list[dict] +) +PropertyValueWrite: TypeAlias = ( + str + | int + | float + | bool + | dict + | SequenceNotStr[str] + | Sequence[int] + | Sequence[float] + | Sequence[bool] + | Sequence[dict] + | NodeId + | DirectRelationReference + | date + | datetime + | Sequence[NodeId | DirectRelationReference] + | Sequence[date] + | Sequence[datetime] + | None +) Space: TypeAlias = str PropertyIdentifier: TypeAlias = str @@ -158,7 +149,7 @@ class InstanceCore(DataModelingResource, ABC): Args: space (str): The workspace for the instance, a unique identifier for the space. external_id (str): Combined with the space is the unique identifier of the instance. - instance_type (Literal["node", "edge"]): The type of instance. + instance_type (Literal['node', 'edge']): The type of instance. """ def __init__(self, space: str, external_id: str, instance_type: Literal["node", "edge"]) -> None: @@ -180,7 +171,7 @@ class InstanceApply(WritableInstanceCore[T_CogniteResource], ABC): Args: space (str): The workspace for the instance, a unique identifier for the space. external_id (str): Combined with the space is the unique identifier of the instance. - instance_type (Literal["node", "edge"]): The type of instance. + instance_type (Literal['node', 'edge']): The type of instance. existing_version (int | None): Fail the ingestion request if the instance's version is greater than or equal to this value. If no existingVersion is specified, the ingestion will always overwrite any existing data for the instance (for the specified container or instance). If existingVersion is set to 0, the upsert will behave as an insert, so it will fail the bulk if the instance already exists. If skipOnVersionConflict is set on the ingestion request, then the instance will be skipped instead of failing the ingestion request. sources (list[NodeOrEdgeData] | None): List of source properties to write. The properties are from the instance and/or container the container(s) making up this node. """ @@ -254,7 +245,7 @@ def dump(self) -> dict[Space, dict[str, dict[PropertyIdentifier, PropertyValue]] props: dict[Space, dict[str, dict[PropertyIdentifier, PropertyValue]]] = defaultdict(dict) for view_id, properties in self.data.items(): view_id_str = f"{view_id.external_id}/{view_id.version}" - props[view_id.space][view_id_str] = cast(Dict[PropertyIdentifier, PropertyValue], properties) + props[view_id.space][view_id_str] = cast(dict[PropertyIdentifier, PropertyValue], properties) # Defaultdict is not yaml serializable return dict(props) @@ -328,7 +319,7 @@ class Instance(WritableInstanceCore[T_CogniteResource], ABC): version (int): Current version of the instance. last_updated_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. - instance_type (Literal["node", "edge"]): The type of instance. + instance_type (Literal['node', 'edge']): The type of instance. deleted_time (int | None): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. 
Timestamp when the instance was soft deleted. Note that deleted instances are filtered out of query results, but present in sync results properties (Properties | None): Properties of the instance. """ @@ -484,7 +475,7 @@ class InstanceApplyResult(InstanceCore, ABC): """A node or edge. This represents the update on the instance. Args: - instance_type (Literal["node", "edge"]): The type of instance. + instance_type (Literal['node', 'edge']): The type of instance. space (str): The workspace for the instance, a unique identifier for the space. external_id (str): Combined with the space is the unique identifier of the instance. version (int): DMS version of the instance. @@ -537,7 +528,7 @@ def _load(cls, resource: dict, cognite_client: CogniteClient | None = None) -> S """ return cls( aggregates=[AggregatedNumberedValue.load(agg) for agg in resource["aggregates"]], - group=cast(Dict[str, Union[str, int, float, bool]], resource.get("group")), + group=cast(dict[str, str | int | float | bool], resource.get("group")), ) def dump(self, camel_case: bool = True) -> dict[str, Any]: diff --git a/cognite/client/data_classes/data_modeling/query.py b/cognite/client/data_classes/data_modeling/query.py index 809e18f78e..2f2a7fba18 100644 --- a/cognite/client/data_classes/data_modeling/query.py +++ b/cognite/client/data_classes/data_modeling/query.py @@ -1,10 +1,10 @@ from __future__ import annotations -import warnings from abc import ABC, abstractmethod from collections import UserDict +from collections.abc import Mapping, Sequence from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Any, Literal, Mapping, Sequence, cast +from typing import TYPE_CHECKING, Any, Literal, cast from typing_extensions import Self @@ -150,14 +150,6 @@ def dump(self, camel_case: bool = True) -> dict[str, Any]: output["cursors"] = dict(self.cursors.items()) return output - @classmethod - def load_yaml(cls, data: str) -> Query: - warnings.warn( - "Query.load_yaml is deprecated and will be removed after Oct 2024, please use Query.load", - UserWarning, - ) - return cls.load(data) - @classmethod def _load(cls, resource: dict[str, Any], cognite_client: CogniteClient | None = None) -> Self: parameters = dict(resource["parameters"].items()) if "parameters" in resource else None @@ -244,17 +236,9 @@ class NodeResultSetExpression(ResultSetExpression): filter (Filter | None): Filter the result set based on this filter. sort (list[InstanceSort] | None): Sort the result set based on this list of sort criteria. limit (int | None): Limit the result set to this number of instances. - through (list[str] | tuple[str, str, str] | PropertyId | None): Chain your result-expression through this - container or view. The property must be a reference to a direct relation property. `from_` must be defined. - The tuple must be on the form (space, container, property) or (space, view/version, property). - direction (Literal["outwards", "inwards"]): The direction to use when traversing direct relations. - Only applicable when through is specified. - chain_to (Literal["destination", "source"]): Control which side of the edge to chain to. - The chain_to option is only applicable if the result rexpression referenced in `from` - contains edges. `source` will chain to start if you're following edges outwards i.e `direction=outwards`. If you're - following edges inwards i.e `direction=inwards`, it will chain to end. `destination` (default) will chain to - end if you're following edges outwards i.e `direction=outwards`. 
If you're following edges - inwards i.e, `direction=inwards`, it will chain to start. + through (list[str] | tuple[str, str, str] | PropertyId | None): Chain your result-expression through this container or view. The property must be a reference to a direct relation property. `from_` must be defined. The tuple must be of the form (space, container, property) or (space, view/version, property). + direction (Literal['outwards', 'inwards']): The direction to use when traversing direct relations. Only applicable when through is specified. + chain_to (Literal['destination', 'source']): Control which side of the edge to chain to. The chain_to option is only applicable if the result expression referenced in `from` contains edges. `source` will chain to start if you're following edges outwards, i.e. `direction=outwards`. If you're following edges inwards, i.e. `direction=inwards`, it will chain to end. `destination` (default) will chain to end if you're following edges outwards, i.e. `direction=outwards`. If you're following edges inwards, i.e. `direction=inwards`, it will chain to start. """ def __init__( @@ -315,22 +299,15 @@ class EdgeResultSetExpression(ResultSetExpression): Args: from_ (str | None): Chain your result expression from this edge. max_distance (int | None): The largest - max - number of levels to traverse. - direction (Literal["outwards", "inwards"]): The direction to use when traversing. + direction (Literal['outwards', 'inwards']): The direction to use when traversing. filter (Filter | None): Filter the result set based on this filter. node_filter (Filter | None): Filter the result set based on this filter. termination_filter (Filter | None): Filter the result set based on this filter. - limit_each (int | None): Limit the number of returned edges for each of the source nodes in the result set. - The indicated uniform limit applies to the result set from the referenced from. - limitEach only has meaning when you also specify maxDistance=1 and from. + limit_each (int | None): Limit the number of returned edges for each of the source nodes in the result set. The indicated uniform limit applies to the result set from the referenced from. limitEach only has meaning when you also specify maxDistance=1 and from. sort (list[InstanceSort] | None): Sort the result set based on this list of sort criteria. post_sort (list[InstanceSort] | None): Sort the result set based on this list of sort criteria. limit (int | None): Limit the result set to this number of instances. - chain_to (Literal["destination", "source"]): Control which side of the edge to chain to. - The chain_to option is only applicable if the result rexpression referenced in `from` - contains edges. `source` will chain to start if you're following edges outwards i.e `direction=outwards`. If you're - following edges inwards i.e `direction=inwards`, it will chain to end. `destination` (default) will chain to - end if you're following edges outwards i.e `direction=outwards`. If you're following edges - inwards i.e, `direction=inwards`, it will chain to start. + chain_to (Literal['destination', 'source']): Control which side of the edge to chain to. The chain_to option is only applicable if the result expression referenced in `from` contains edges. `source` will chain to start if you're following edges outwards, i.e. `direction=outwards`. If you're following edges inwards, i.e. `direction=inwards`, it will chain to end. `destination` (default) will chain to end if you're following edges outwards, i.e. `direction=outwards`. If you're following edges inwards, i.e. `direction=inwards`, it will chain to start. """
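To make the chain_to and through semantics documented above concrete, here is a hedged sketch that uses only the constructor parameters listed in these hunks; the result-set name "movies" and the (space, view/version, property) tuple are invented for illustration:

    from cognite.client.data_classes.data_modeling.query import (
        EdgeResultSetExpression,
        NodeResultSetExpression,
    )

    # Follow edges outwards from the "movies" result set and chain to the
    # edge end nodes ('destination' is the default).
    actor_edges = EdgeResultSetExpression(
        from_="movies", direction="outwards", chain_to="destination"
    )

    # Follow a direct relation property instead of edges; the tuple is of
    # the form (space, view/version, property).
    directors = NodeResultSetExpression(
        from_="movies", through=("imdb", "Movie/v1", "directors")
    )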
diff --git a/cognite/client/data_classes/data_modeling/spaces.py b/cognite/client/data_classes/data_modeling/spaces.py index d48385509c..29d1315ee0 100644 --- a/cognite/client/data_classes/data_modeling/spaces.py +++ b/cognite/client/data_classes/data_modeling/spaces.py @@ -1,7 +1,8 @@ from __future__ import annotations from abc import ABC -from typing import TYPE_CHECKING, Any, Iterable +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any from typing_extensions import Self diff --git a/cognite/client/data_classes/data_modeling/views.py b/cognite/client/data_classes/data_modeling/views.py index f673e53afa..458f259bc3 100644 --- a/cognite/client/data_classes/data_modeling/views.py +++ b/cognite/client/data_classes/data_modeling/views.py @@ -3,9 +3,9 @@ import warnings from abc import ABC, abstractmethod from dataclasses import asdict, dataclass -from typing import TYPE_CHECKING, Any, Literal, TypeVar, cast +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, TypeVar, cast -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import ( CogniteFilter, @@ -162,7 +162,7 @@ class View(ViewCore): filter (Filter | None): A filter Domain Specific Language (DSL) used to create advanced filter queries. implements (list[ViewId] | None): References to the views from where this view will inherit properties and edges. writable (bool): Whether the view supports write operations. - used_for (Literal["node", "edge", "all"]): Does this view apply to nodes, edges or both. + used_for (Literal['node', 'edge', 'all']): Does this view apply to nodes, edges or both. is_global (bool): Whether this is a global view. """ diff --git a/cognite/client/data_classes/datapoints.py b/cognite/client/data_classes/datapoints.py index 8705143987..8bf1998a9f 100644 --- a/cognite/client/data_classes/datapoints.py +++ b/cognite/client/data_classes/datapoints.py @@ -6,6 +6,7 @@ import typing import warnings from collections import defaultdict +from collections.abc import Collection, Iterator, Sequence from dataclasses import InitVar, dataclass, fields from enum import IntEnum from functools import cached_property @@ -13,10 +14,7 @@ TYPE_CHECKING, Any, ClassVar, - Collection, - Iterator, Literal, - Sequence, TypedDict, cast, overload, @@ -812,7 +810,7 @@ def to_pandas( # type: ignore [override] """Convert the DatapointsArray into a pandas DataFrame. Args: - column_names (Literal["id", "external_id", "instance_id"]): Which field to use for the columns. Defaults to "instance_id", if it exists, then uses "external_id" if available, and "id" as fallback. + column_names (Literal['id', 'external_id', 'instance_id']): Which field to use for the columns. Defaults to "instance_id", if it exists, then uses "external_id" if available, and "id" as fallback. include_aggregate_name (bool): Include aggregate in the column name include_granularity_name (bool): Include granularity in the column name (after aggregate if present) include_status (bool): Include status code and status symbol as separate columns, if available. @@ -1034,7 +1032,7 @@ def to_pandas( # type: ignore [override] """Convert the datapoints into a pandas DataFrame. Args: - column_names (Literal["id", "external_id", "instance_id"]): Which field to use for the columns. Defaults to "instance_id", if it exists, then uses "external_id" if available, and "id" as fallback.
+ column_names (Literal['id', 'external_id', 'instance_id']): Which field to use for the columns. Defaults to "instance_id", if it exists, then uses "external_id" if available, and "id" as fallback. include_aggregate_name (bool): Include aggregate in the column name include_granularity_name (bool): Include granularity in the column name (after aggregate if present) include_errors (bool): For synthetic datapoint queries, include a column with errors. @@ -1327,7 +1325,7 @@ def to_pandas( # type: ignore [override] """Convert the DatapointsArrayList into a pandas DataFrame. Args: - column_names (Literal["id", "external_id", "instance_id"]): Which field to use for the columns. Defaults to "instance_id", if it exists, then uses "external_id" if available, and "id" as fallback. + column_names (Literal['id', 'external_id', 'instance_id']): Which field to use for the columns. Defaults to "instance_id", if it exists, then uses "external_id" if available, and "id" as fallback. include_aggregate_name (bool): Include aggregate in the column name include_granularity_name (bool): Include granularity in the column name (after aggregate if present) include_status (bool): Include status code and status symbol as separate columns, if available. @@ -1414,7 +1412,7 @@ def to_pandas( # type: ignore [override] """Convert the datapoints list into a pandas DataFrame. Args: - column_names (Literal["id", "external_id", "instance_id"]): Which field to use for the columns. Defaults to "instance_id", if it exists, then uses "external_id" if available, and "id" as fallback. + column_names (Literal['id', 'external_id', 'instance_id']): Which field to use for the columns. Defaults to "instance_id", if it exists, then uses "external_id" if available, and "id" as fallback. include_aggregate_name (bool): Include aggregate in the column name include_granularity_name (bool): Include granularity in the column name (after aggregate if present) include_status (bool): Include status code and status symbol as separate columns, if available. diff --git a/cognite/client/data_classes/datapoints_subscriptions.py b/cognite/client/data_classes/datapoints_subscriptions.py index d6a7b11742..67fd757fa3 100644 --- a/cognite/client/data_classes/datapoints_subscriptions.py +++ b/cognite/client/data_classes/datapoints_subscriptions.py @@ -3,9 +3,9 @@ from abc import ABC from dataclasses import dataclass from enum import auto -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, TypeAlias -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes import Datapoints from cognite.client.data_classes._base import ( diff --git a/cognite/client/data_classes/documents.py b/cognite/client/data_classes/documents.py index c4b11a4506..0e6568bf4c 100644 --- a/cognite/client/data_classes/documents.py +++ b/cognite/client/data_classes/documents.py @@ -1,10 +1,11 @@ from __future__ import annotations +from collections.abc import Collection from dataclasses import dataclass from enum import auto -from typing import TYPE_CHECKING, Any, Collection, List, Literal, Union, cast +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, cast -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import ( CogniteObject, @@ -26,7 +27,7 @@ class DocumentsGeoJsonGeometry(CogniteObject): """Represents the points, curves and surfaces in the coordinate space. 
Args: - type (Literal["Point", "MultiPoint", "LineString", "MultiLineString", "Polygon", "MultiPolygon", "GeometryCollection"]): The geometry type. + type (Literal['Point', 'MultiPoint', 'LineString', 'MultiLineString', 'Polygon', 'MultiPolygon', 'GeometryCollection']): The geometry type. coordinates (list | None): An array of the coordinates of the geometry. The structure of the elements in this array is determined by the type of geometry. geometries (Collection[Geometry] | None): No description. @@ -436,7 +437,7 @@ class DocumentProperty(EnumProperty): content = auto() -SortableProperty: TypeAlias = Union[SortableSourceFileProperty, SortableDocumentProperty, str, List[str]] +SortableProperty: TypeAlias = SortableSourceFileProperty | SortableDocumentProperty | str | list[str] class DocumentSort(CogniteSort): diff --git a/cognite/client/data_classes/events.py b/cognite/client/data_classes/events.py index 4925953e8a..85435fac7b 100644 --- a/cognite/client/data_classes/events.py +++ b/cognite/client/data_classes/events.py @@ -1,10 +1,9 @@ from __future__ import annotations from abc import ABC +from collections.abc import Sequence from enum import auto -from typing import TYPE_CHECKING, Any, List, Literal, Sequence, Union, cast - -from typing_extensions import TypeAlias +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, cast from cognite.client.data_classes._base import ( CogniteFilter, @@ -409,7 +408,7 @@ def metadata_key(key: str) -> list[str]: return ["metadata", key] -EventPropertyLike: TypeAlias = Union[EventProperty, str, List[str]] +EventPropertyLike: TypeAlias = EventProperty | str | list[str] class SortableEventProperty(EnumProperty): @@ -430,7 +429,7 @@ def metadata_key(key: str) -> list[str]: return ["metadata", key] -SortableEventPropertyLike: TypeAlias = Union[SortableEventProperty, str, List[str]] +SortableEventPropertyLike: TypeAlias = SortableEventProperty | str | list[str] class EventSort(CogniteSort): diff --git a/cognite/client/data_classes/extractionpipelines.py b/cognite/client/data_classes/extractionpipelines.py index 43d2f2f335..ed7dfa12a2 100644 --- a/cognite/client/data_classes/extractionpipelines.py +++ b/cognite/client/data_classes/extractionpipelines.py @@ -472,7 +472,7 @@ class ExtractionPipelineRunWrite(ExtractionPipelineRunCore): Args: extpipe_external_id (str): The external ID of the extraction pipeline. - status (Literal["success", "failure", "seen"]): success/failure/seen. + status (Literal['success', 'failure', 'seen']): success/failure/seen. message (str | None): Optional status message. created_time (int | None): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. 
""" diff --git a/cognite/client/data_classes/files.py b/cognite/client/data_classes/files.py index 6a3eced9a1..2565f513ce 100644 --- a/cognite/client/data_classes/files.py +++ b/cognite/client/data_classes/files.py @@ -1,8 +1,9 @@ from __future__ import annotations from abc import ABC +from collections.abc import Sequence from types import TracebackType -from typing import TYPE_CHECKING, Any, BinaryIO, Literal, Sequence, TextIO, TypeVar, cast +from typing import TYPE_CHECKING, Any, BinaryIO, Literal, TextIO, TypeVar, cast from cognite.client.data_classes._base import ( CogniteFilter, diff --git a/cognite/client/data_classes/filters.py b/cognite/client/data_classes/filters.py index 9912bcbfd7..5a45aac45e 100644 --- a/cognite/client/data_classes/filters.py +++ b/cognite/client/data_classes/filters.py @@ -1,10 +1,9 @@ from __future__ import annotations from abc import ABC, abstractmethod +from collections.abc import Mapping, Sequence from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, List, Literal, Mapping, NoReturn, Sequence, Tuple, Union, cast, final - -from typing_extensions import TypeAlias +from typing import TYPE_CHECKING, Any, Literal, NoReturn, TypeAlias, cast, final from cognite.client.data_classes._base import EnumProperty, Geometry from cognite.client.data_classes.labels import Label @@ -15,9 +14,9 @@ from cognite.client.data_classes.data_modeling.ids import ContainerId, ViewId -PropertyReference: TypeAlias = Union[str, Tuple[str, ...], List[str], EnumProperty] +PropertyReference: TypeAlias = str | tuple[str, ...] | list[str] | EnumProperty -RawValue: TypeAlias = Union[str, float, bool, Sequence, Mapping[str, Any], Label] +RawValue: TypeAlias = str | float | bool | Sequence | Mapping[str, Any] | Label @dataclass @@ -30,8 +29,8 @@ class ParameterValue: parameter: str -FilterValue: TypeAlias = Union[RawValue, PropertyReferenceValue, ParameterValue] -FilterValueList: TypeAlias = Union[Sequence[RawValue], PropertyReferenceValue, ParameterValue] +FilterValue: TypeAlias = RawValue | PropertyReferenceValue | ParameterValue +FilterValueList: TypeAlias = Sequence[RawValue] | PropertyReferenceValue | ParameterValue def _dump_filter_value(value: FilterValueList | FilterValue) -> Any: @@ -834,7 +833,7 @@ class SpaceFilter(FilterWithProperty): Args: space (str | SequenceNotStr[str]): The space (or spaces) to filter on. - instance_type (Literal["node", "edge"]): Type of instance to filter on. Defaults to "node". + instance_type (Literal['node', 'edge']): Type of instance to filter on. Defaults to "node". 
Example: Filter than can be used to retrieve nodes from space "space1" or "space2": diff --git a/cognite/client/data_classes/functions.py b/cognite/client/data_classes/functions.py index 2a2364166c..8e2b9ed892 100644 --- a/cognite/client/data_classes/functions.py +++ b/cognite/client/data_classes/functions.py @@ -2,9 +2,7 @@ import time from abc import ABC -from typing import TYPE_CHECKING, Any, Literal, cast - -from typing_extensions import TypeAlias +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, cast from cognite.client._constants import DEFAULT_LIMIT_READ from cognite.client.data_classes._base import ( diff --git a/cognite/client/data_classes/geospatial.py b/cognite/client/data_classes/geospatial.py index 7a60a2dd5a..7ce87add32 100644 --- a/cognite/client/data_classes/geospatial.py +++ b/cognite/client/data_classes/geospatial.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, List, TypeVar, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from typing_extensions import Self @@ -701,7 +701,7 @@ def __init__( @classmethod def _load(cls, resource: dict[str, Any], cognite_client: CogniteClient | None = None) -> GeospatialComputedResponse: item_list = GeospatialComputedItemList._load( - cast(List[Any], resource.get("items")), cognite_client=cognite_client + cast(list[Any], resource.get("items")), cognite_client=cognite_client ) return cls(item_list, cognite_client=cognite_client) diff --git a/cognite/client/data_classes/hosted_extractors/jobs.py b/cognite/client/data_classes/hosted_extractors/jobs.py index a1ad577fb3..3471468c9f 100644 --- a/cognite/client/data_classes/hosted_extractors/jobs.py +++ b/cognite/client/data_classes/hosted_extractors/jobs.py @@ -2,9 +2,9 @@ from abc import ABC, abstractmethod from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, ClassVar, Literal, cast +from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeAlias, cast -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import ( CogniteObject, @@ -476,7 +476,7 @@ class JobLogs(CogniteResource): Args: job_external_id (str): The external ID of the job. - type (Literal["paused", "startup_error", "connection_error", "connected", "transform_error", "cdf_write_error", "ok"]): Type of log entry. + type (Literal['paused', 'startup_error', 'connection_error', 'connected', 'transform_error', 'cdf_write_error', 'ok']): Type of log entry. created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. message (str | None): Log message. Not all log entries have messages. diff --git a/cognite/client/data_classes/hosted_extractors/mappings.py b/cognite/client/data_classes/hosted_extractors/mappings.py index 2c59138b3c..85700cf743 100644 --- a/cognite/client/data_classes/hosted_extractors/mappings.py +++ b/cognite/client/data_classes/hosted_extractors/mappings.py @@ -148,7 +148,7 @@ class MappingWrite(_MappingCore): external_id (str): The external ID provided by the client. Must be unique for the resource type. mapping (CustomMapping): The custom mapping. published (bool): Whether this mapping is published and should be available to be used in jobs. - input (InputMapping | Literal["csv", "json", "xml"]): The input mapping. Defaults to 'json' + input (InputMapping | Literal['csv', 'json', 'xml']): The input mapping. 
Defaults to 'json' """ def __init__( diff --git a/cognite/client/data_classes/hosted_extractors/sources.py b/cognite/client/data_classes/hosted_extractors/sources.py index 078c60c72e..6c9495b10c 100644 --- a/cognite/client/data_classes/hosted_extractors/sources.py +++ b/cognite/client/data_classes/hosted_extractors/sources.py @@ -795,7 +795,7 @@ class RestSourceWrite(SourceWrite): Args: external_id (str): The external ID provided by the client. Must be unique for the resource type. host (str): Host or IP address to connect to. - scheme (Literal["http", "https"]): Type of connection to establish. + scheme (Literal['http', 'https']): Type of connection to establish. port (int | None): Port on server to connect to. Uses default ports based on the scheme if omitted. ca_certificate (CACertificateWrite | None): Custom certificate authority certificate to let the source use a self signed certificate. auth_certificate (AuthCertificateWrite | None): Authentication certificate (if configured) used to authenticate to source. @@ -851,7 +851,7 @@ class RestSource(Source): Args: external_id (str): The external ID provided by the client. Must be unique for the resource type. host (str): Host or IP address to connect to. - scheme (Literal["http", "https"]): Type of connection to establish. + scheme (Literal['http', 'https']): Type of connection to establish. created_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. last_updated_time (int): The number of milliseconds since 00:00:00 Thursday, 1 January 1970, Coordinated Universal Time (UTC), minus leap seconds. port (int | None): Port on server to connect to. Uses default ports based on the scheme if omitted. diff --git a/cognite/client/data_classes/iam.py b/cognite/client/data_classes/iam.py index 8105fe0d6a..a9ae4c1a86 100644 --- a/cognite/client/data_classes/iam.py +++ b/cognite/client/data_classes/iam.py @@ -1,9 +1,10 @@ from __future__ import annotations from abc import ABC -from typing import TYPE_CHECKING, Any, Iterable, Literal, cast +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, cast -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import ( CogniteResource, diff --git a/cognite/client/data_classes/labels.py b/cognite/client/data_classes/labels.py index 5b55231184..63da47d53a 100644 --- a/cognite/client/data_classes/labels.py +++ b/cognite/client/data_classes/labels.py @@ -1,7 +1,8 @@ from __future__ import annotations from abc import ABC -from typing import TYPE_CHECKING, Any, Sequence, cast +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any, cast from cognite.client.data_classes._base import ( CogniteFilter, diff --git a/cognite/client/data_classes/relationships.py b/cognite/client/data_classes/relationships.py index 2b1b1940fe..45c95b7cab 100644 --- a/cognite/client/data_classes/relationships.py +++ b/cognite/client/data_classes/relationships.py @@ -3,9 +3,9 @@ import copy import typing from abc import ABC -from typing import TYPE_CHECKING, Any, Literal, cast +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, cast -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import ( CogniteFilter, diff --git a/cognite/client/data_classes/sequences.py b/cognite/client/data_classes/sequences.py index 9a520c2772..a1eb09e062 100644 --- 
a/cognite/client/data_classes/sequences.py +++ b/cognite/client/data_classes/sequences.py @@ -3,10 +3,11 @@ import typing import warnings from abc import ABC +from collections.abc import Iterator from enum import auto -from typing import TYPE_CHECKING, Any, Iterator, List, Literal, NoReturn, Union, cast, get_args, overload +from typing import TYPE_CHECKING, Any, Literal, NoReturn, TypeAlias, cast, get_args, overload -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import ( CogniteFilter, @@ -531,18 +532,18 @@ class _ColumnsSequenceUpdate(CogniteListUpdate): def add(self, value: dict | list[dict]) -> SequenceUpdate: single_item = not isinstance(value, list) if single_item: - value_list = cast(List[str], [value]) + value_list = cast(list[str], [value]) else: - value_list = cast(List[str], value) + value_list = cast(list[str], value) return self._add(value_list) def remove(self, value: str | list[str]) -> SequenceUpdate: single_item = not isinstance(value, list) if single_item: - value_list = cast(List[str], [value]) + value_list = cast(list[str], [value]) else: - value_list = cast(List[str], value) + value_list = cast(list[str], value) return self._remove([{"externalId": id} for id in value_list]) @@ -604,7 +605,7 @@ def as_write(self) -> SequenceWriteList: return SequenceWriteList([item.as_write() for item in self], cognite_client=self._get_cognite_client()) -RowValues: TypeAlias = Union[int, str, float, None] +RowValues: TypeAlias = int | str | float | None class SequenceRow(CogniteResource): @@ -880,7 +881,7 @@ def to_pandas( """Convert the sequence data list into a pandas DataFrame. Each column will be a sequence. Args: - key (Literal["id", "external_id"]): If concat = False, this decides which field to use as key in the dictionary. Defaults to "external_id". + key (Literal['id', 'external_id']): If concat = False, this decides which field to use as key in the dictionary. Defaults to "external_id". column_names (ColumnNames): Which field to use as column header. Can use any combination of "externalId", "columnExternalId", "id" and other characters as a template. concat (bool): Whether to concatenate the sequences into a single DataFrame or return a dictionary of DataFrames. Defaults to False. @@ -941,7 +942,7 @@ def metadata_key(key: str) -> list[str]: return ["metadata", key] -SortableSequencePropertyLike: TypeAlias = Union[SortableSequenceProperty, str, List[str]] +SortableSequencePropertyLike: TypeAlias = SortableSequenceProperty | str | list[str] class SequenceSort(CogniteSort): diff --git a/cognite/client/data_classes/shared.py b/cognite/client/data_classes/shared.py index 7638804c1b..1bf268e29a 100644 --- a/cognite/client/data_classes/shared.py +++ b/cognite/client/data_classes/shared.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Literal, Sequence +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any, Literal from typing_extensions import Self @@ -52,7 +53,7 @@ class GeometryFilter(CogniteFilter): """Represents the points, curves and surfaces in the coordinate space. Args: - type (Literal["Point", "MultiPoint", "LineString", "MultiLineString", "Polygon", "MultiPolygon"]): The geometry type. + type (Literal['Point', 'MultiPoint', 'LineString', 'MultiLineString', 'Polygon', 'MultiPolygon']): The geometry type. 
coordinates (Sequence[float] | Sequence[Sequence[float]] | Sequence[Sequence[Sequence[float]]] | Sequence[Sequence[Sequence[Sequence[float]]]]): An array of the coordinates of the geometry. The structure of the elements in this array is determined by the type of geometry. Point: diff --git a/cognite/client/data_classes/templates.py b/cognite/client/data_classes/templates.py index 1252231d10..18467465dd 100644 --- a/cognite/client/data_classes/templates.py +++ b/cognite/client/data_classes/templates.py @@ -2,9 +2,7 @@ from abc import ABC from collections import UserDict -from typing import TYPE_CHECKING, Any, ClassVar, Union, cast - -from typing_extensions import TypeAlias +from typing import TYPE_CHECKING, Any, ClassVar, TypeAlias, cast from cognite.client.data_classes._base import ( CogniteObjectUpdate, @@ -268,7 +266,7 @@ def __init__( self._cognite_client = cast("CogniteClient", cognite_client) -FieldResolvers: TypeAlias = Union[ConstantResolver, RawResolver, SyntheticTimeSeriesResolver, str, ViewResolver] +FieldResolvers: TypeAlias = ConstantResolver | RawResolver | SyntheticTimeSeriesResolver | str | ViewResolver class TemplateInstanceCore(WriteableCogniteResource["TemplateInstanceWrite"], ABC): diff --git a/cognite/client/data_classes/time_series.py b/cognite/client/data_classes/time_series.py index cc60278a8c..b9fdb9d504 100644 --- a/cognite/client/data_classes/time_series.py +++ b/cognite/client/data_classes/time_series.py @@ -2,11 +2,12 @@ import warnings from abc import ABC +from collections.abc import Sequence from datetime import datetime from enum import auto -from typing import TYPE_CHECKING, Any, List, Literal, Sequence, Union, cast +from typing import TYPE_CHECKING, Any, Literal, TypeAlias, cast -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import ( CogniteFilter, @@ -507,7 +508,7 @@ def metadata_key(key: str) -> list[str]: return ["metadata", key] -SortableTimeSeriesPropertyLike: TypeAlias = Union[SortableTimeSeriesProperty, str, List[str]] +SortableTimeSeriesPropertyLike: TypeAlias = SortableTimeSeriesProperty | str | list[str] class TimeSeriesSort(CogniteSort): diff --git a/cognite/client/data_classes/transformations/__init__.py b/cognite/client/data_classes/transformations/__init__.py index 2d1ba154f4..0c19497e40 100644 --- a/cognite/client/data_classes/transformations/__init__.py +++ b/cognite/client/data_classes/transformations/__init__.py @@ -2,8 +2,9 @@ import warnings from abc import ABC, abstractmethod +from collections.abc import Awaitable from copy import deepcopy -from typing import TYPE_CHECKING, Any, Awaitable, Literal, cast +from typing import TYPE_CHECKING, Any, Literal, cast from cognite.client.data_classes._base import ( CogniteFilter, @@ -471,7 +472,7 @@ class TransformationWrite(TransformationCore): ignore_null_fields (bool): Indicates how null values are handled on updates: ignore or set null. query (str | None): SQL query of the transformation. destination (TransformationDestination | None): see TransformationDestination for options. - conflict_mode (Literal["abort", "delete", "update", "upsert"] | None): What to do in case of id collisions: either "abort", "upsert", "update" or "delete" + conflict_mode (Literal['abort', 'delete', 'update', 'upsert'] | None): What to do in case of id collisions: either "abort", "upsert", "update" or "delete" is_public (bool): Indicates if the transformation is visible to all in project or only to the owner. 
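[Reviewer note] To make the conflict_mode options in the TransformationWrite docstring above concrete, a minimal sketch; class and argument names are taken from that docstring, while the import path and any required fields not shown here are assumptions:

from cognite.client.data_classes import TransformationWrite  # import path assumed

# "upsert" creates rows whose ids are new and updates rows whose ids collide;
# "abort" would instead fail the run on the first id collision.
transformation = TransformationWrite(
    external_id="my-transformation",  # hypothetical identifier
    ignore_null_fields=True,          # nulls in the query output are ignored on update
    query="select * from my_db.my_table",
    conflict_mode="upsert",
)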
source_oidc_credentials (OidcCredentials | None): Configure the transformation to authenticate with the given oidc credentials key on the destination. destination_oidc_credentials (OidcCredentials | None): Configure the transformation to authenticate with the given oidc credentials on the destination. diff --git a/cognite/client/data_classes/user_profiles.py b/cognite/client/data_classes/user_profiles.py index 397f5b0863..68dad137a7 100644 --- a/cognite/client/data_classes/user_profiles.py +++ b/cognite/client/data_classes/user_profiles.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Sequence, cast +from collections.abc import Sequence +from typing import TYPE_CHECKING, Any, cast from typing_extensions import Self diff --git a/cognite/client/data_classes/workflows.py b/cognite/client/data_classes/workflows.py index 70e5ef9085..1373e07dff 100644 --- a/cognite/client/data_classes/workflows.py +++ b/cognite/client/data_classes/workflows.py @@ -2,11 +2,11 @@ from abc import ABC, abstractmethod from collections import UserList -from collections.abc import Collection +from collections.abc import Collection, Sequence from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, ClassVar, Literal, Sequence, cast +from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeAlias, cast -from typing_extensions import Self, TypeAlias +from typing_extensions import Self from cognite.client.data_classes._base import ( CogniteObject, @@ -225,10 +225,7 @@ class TransformationTaskParameters(WorkflowTaskParameters): Args: external_id (str): The external ID of the transformation to be called. - concurrency_policy (Literal["fail", "restartAfterCurrent", "waitForCurrent"]): Determines the behavior of the task if the Transformation is already running.\n - * *fail*: The task fails if another instance of the Transformation is currently running.\n - * *waitForCurrent*: The task will pause and wait for the already running Transformation to complete. Once completed, the task is completed. This mode is useful for preventing redundant Transformation runs.\n - * *restartAfterCurrent*: The task waits for the ongoing Transformation to finish. After completion, the task restarts the Transformation. This mode ensures that the most recent data can be used by following tasks. + concurrency_policy (Literal['fail', 'restartAfterCurrent', 'waitForCurrent']): Determines the behavior of the task if the Transformation is already running. ``fail``: The task fails if another instance of the Transformation is currently running. ``waitForCurrent``: The task will pause and wait for the already running Transformation to complete. Once completed, the task is completed. This mode is useful for preventing redundant Transformation runs. ``restartAfterCurrent``: The task waits for the ongoing Transformation to finish. After completion, the task restarts the Transformation. This mode ensures that the most recent data can be used by following tasks. """ task_type = "transformation" @@ -261,7 +258,7 @@ class CDFTaskParameters(WorkflowTaskParameters): Args: resource_path (str): The resource path of the request. Note the path of the request which is prefixed by '{cluster}.cognitedata.com/api/v1/project/{project}' based on the cluster and project of the request. - method (Literal["GET", "POST", "PUT", "DELETE"] | str): The HTTP method of the request. + method (Literal['GET', 'POST', 'PUT', 'DELETE'] | str): The HTTP method of the request. 
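[Reviewer note] For the two workflow task parameter classes documented above, a usage sketch might look as follows; only parameters listed in the docstrings are used, and the import path and concrete values are illustrative:

from cognite.client.data_classes import CDFTaskParameters, TransformationTaskParameters

# Call an arbitrary CDF endpoint; the workflow engine prefixes resource_path
# with '{cluster}.cognitedata.com/api/v1/project/{project}'.
cdf_task = CDFTaskParameters(
    resource_path="/timeseries/list",  # hypothetical endpoint
    method="POST",
    body={"filter": {}, "limit": 100},
    request_timeout_in_millis=10000,
)

# Wait for an already running transformation rather than failing or restarting it.
transformation_task = TransformationTaskParameters(
    external_id="my-transformation",
    concurrency_policy="waitForCurrent",
)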
query_parameters (dict | str | None): The query parameters of the request. Defaults to None. body (dict | str | None): The body of the request. Defaults to None. Limited to 1024KiB in size request_timeout_in_millis (int | str): The timeout of the request in milliseconds. Defaults to 10000. @@ -441,9 +438,7 @@ class WorkflowTask(CogniteResource): description (str | None): The description of the task. Defaults to None. retries (int): The number of retries for the task. Defaults to 3. timeout (int): The timeout of the task in seconds. Defaults to 3600. - on_failure (Literal["abortWorkflow", "skipTask"]): The policy to handle failures and timeouts. Defaults to *abortWorkflow*.\n - * *skipTask*: For both failures and timeouts, the task will retry until the retries are exhausted. After that, the Task is marked as COMPLETED_WITH_ERRORS and the subsequent tasks are executed.\n - * *abortWorkflow*: In case of failures, retries will be performed until exhausted. After which the task is marked as FAILED and the Workflow is marked the same. In the event of a timeout, no retries are undertaken; the task is marked as TIMED_OUT and the Workflow is marked as FAILED. + on_failure (Literal['abortWorkflow', 'skipTask']): The policy to handle failures and timeouts. Defaults to *abortWorkflow*. ``skipTask``: For both failures and timeouts, the task will retry until the retries are exhausted. After that, the Task is marked as COMPLETED_WITH_ERRORS and the subsequent tasks are executed. ``abortWorkflow``: In case of failures, retries will be performed until exhausted. After which the task is marked as FAILED and the Workflow is marked the same. In the event of a timeout, no retries are undertaken; the task is marked as TIMED_OUT and the Workflow is marked as FAILED. depends_on (list[str] | None): The external ids of the tasks that this task depends on. Defaults to None. 
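[Reviewer note] Tying the on_failure policy above to a full task definition, a sketch; the external_id and parameters arguments are assumed to be the remaining constructor arguments not shown in this hunk:

from cognite.client.data_classes import TransformationTaskParameters, WorkflowTask

task = WorkflowTask(
    external_id="sync-assets",  # hypothetical task id
    parameters=TransformationTaskParameters(
        external_id="my-transformation",
        concurrency_policy="restartAfterCurrent",
    ),
    retries=3,     # the defaults listed in the docstring above
    timeout=3600,
    on_failure="skipTask",            # continue the workflow as COMPLETED_WITH_ERRORS
    depends_on=["extract-raw-data"],  # hypothetical upstream task
)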
""" diff --git a/cognite/client/exceptions.py b/cognite/client/exceptions.py index edcd0983b5..e9198813df 100644 --- a/cognite/client/exceptions.py +++ b/cognite/client/exceptions.py @@ -1,8 +1,9 @@ from __future__ import annotations import reprlib +from collections.abc import Callable from dataclasses import dataclass -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from cognite.client._constants import _RUNNING_IN_BROWSER from cognite.client.utils import _json diff --git a/cognite/client/testing.py b/cognite/client/testing.py index ec209ed686..6428b06525 100644 --- a/cognite/client/testing.py +++ b/cognite/client/testing.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Iterator from contextlib import contextmanager -from typing import Any, Iterator +from typing import Any from unittest.mock import MagicMock from cognite.client import CogniteClient diff --git a/cognite/client/utils/_auxiliary.py b/cognite/client/utils/_auxiliary.py index 50acdb1c0a..28a401234b 100644 --- a/cognite/client/utils/_auxiliary.py +++ b/cognite/client/utils/_auxiliary.py @@ -4,21 +4,17 @@ import math import platform import warnings +from collections.abc import Hashable, Iterable, Iterator, Sequence from threading import Thread from typing import ( TYPE_CHECKING, Any, - Hashable, - Iterable, - Iterator, - Sequence, + TypeGuard, TypeVar, overload, ) from urllib.parse import quote -from typing_extensions import TypeGuard - from cognite.client.utils import _json from cognite.client.utils._text import ( convert_all_keys_to_camel_case, diff --git a/cognite/client/utils/_concurrency.py b/cognite/client/utils/_concurrency.py index e0684ad66e..cc81c77c38 100644 --- a/cognite/client/utils/_concurrency.py +++ b/cognite/client/utils/_concurrency.py @@ -2,14 +2,13 @@ import functools from collections import UserList +from collections.abc import Callable, Sequence from concurrent.futures import CancelledError, Future, ThreadPoolExecutor, as_completed from typing import ( Any, - Callable, Literal, NoReturn, Protocol, - Sequence, TypeVar, ) @@ -126,7 +125,7 @@ def _raise_duplicated_error(self, unwrap_fn: Callable, **task_lists: list) -> No class TaskExecutor(Protocol): - def submit(self, fn: Callable[..., T_Result], *args: Any, **kwargs: Any) -> TaskFuture[T_Result]: ... + def submit(self, fn: Callable[..., T_Result], /, *args: Any, **kwargs: Any) -> TaskFuture[T_Result]: ... class TaskFuture(Protocol[T_Result]): @@ -160,7 +159,7 @@ def empty(self) -> Literal[True]: self._work_queue = AlwaysEmpty() - def submit(self, fn: Callable[..., T_Result], *args: Any, **kwargs: Any) -> SyncFuture: + def submit(self, fn: Callable[..., T_Result], /, *args: Any, **kwargs: Any) -> SyncFuture: return SyncFuture(fn, *args, **kwargs) @@ -219,16 +218,16 @@ def get_thread_pool_executor_or_raise(cls, max_workers: int) -> ThreadPoolExecut ) @classmethod - def get_data_modeling_executor(cls) -> ThreadPoolExecutor: + def get_data_modeling_executor(cls) -> TaskExecutor: """ The data modeling backend has different concurrency limits compared with the rest of CDF. Thus, we use a dedicated executor for these endpoints to match the backend requirements. Returns: - ThreadPoolExecutor: The data modeling executor. + TaskExecutor: The data modeling executor. 
""" if cls.uses_mainthread(): - return cls.get_mainthread_executor() # type: ignore [return-value] + return cls.get_mainthread_executor() global _DATA_MODELING_THREAD_POOL_EXECUTOR_SINGLETON try: @@ -276,7 +275,7 @@ def execute_tasks( tasks: Sequence[tuple | dict], max_workers: int, fail_fast: bool = False, - executor: ThreadPoolExecutor | None = None, + executor: TaskExecutor | None = None, ) -> TasksSummary: """ Will use a default executor if one is not passed explicitly. The default executor type uses a thread pool but can @@ -286,6 +285,10 @@ def execute_tasks( """ if ConcurrencySettings.uses_mainthread() or isinstance(executor, MainThreadExecutor): return execute_tasks_serially(func, tasks, fail_fast) + elif isinstance(executor, ThreadPoolExecutor) or executor is None: + pass + else: + raise TypeError("executor must be a ThreadPoolExecutor or MainThreadExecutor") executor = executor or ConcurrencySettings.get_thread_pool_executor(max_workers) task_order = [id(task) for task in tasks] diff --git a/cognite/client/utils/_identifier.py b/cognite/client/utils/_identifier.py index 1f0bc4220c..ddb293a43a 100644 --- a/cognite/client/utils/_identifier.py +++ b/cognite/client/utils/_identifier.py @@ -2,6 +2,7 @@ import numbers from abc import ABC +from collections.abc import Sequence from dataclasses import dataclass from typing import ( Any, @@ -10,7 +11,6 @@ Literal, NoReturn, Protocol, - Sequence, TypeVar, cast, overload, diff --git a/cognite/client/utils/_importing.py b/cognite/client/utils/_importing.py index 4d2b4aae4a..2f2d06c6a7 100644 --- a/cognite/client/utils/_importing.py +++ b/cognite/client/utils/_importing.py @@ -1,8 +1,9 @@ from __future__ import annotations import importlib +from collections.abc import Callable, Iterable, Iterator from types import ModuleType -from typing import TYPE_CHECKING, Callable, Iterable, Iterator, TypeVar, overload +from typing import TYPE_CHECKING, TypeVar, overload if TYPE_CHECKING: from concurrent.futures import Future diff --git a/cognite/client/utils/_pandas_helpers.py b/cognite/client/utils/_pandas_helpers.py index 90a319f4a7..c0cba2e3c3 100644 --- a/cognite/client/utils/_pandas_helpers.py +++ b/cognite/client/utils/_pandas_helpers.py @@ -2,11 +2,12 @@ import re import warnings +from collections.abc import Sequence from datetime import timezone from inspect import signature from itertools import chain from numbers import Integral -from typing import TYPE_CHECKING, Any, Literal, Sequence +from typing import TYPE_CHECKING, Any, Literal from cognite.client.exceptions import CogniteImportError from cognite.client.utils._importing import local_import diff --git a/cognite/client/utils/_pyodide_helpers.py b/cognite/client/utils/_pyodide_helpers.py index 26049cbaa2..95dd8e1839 100644 --- a/cognite/client/utils/_pyodide_helpers.py +++ b/cognite/client/utils/_pyodide_helpers.py @@ -2,7 +2,8 @@ import os import warnings -from typing import TYPE_CHECKING, Any, Callable, MutableMapping +from collections.abc import Callable, MutableMapping +from typing import TYPE_CHECKING, Any import cognite.client as cc # Do not import individual entities from cognite.client._http_client import _RetryTracker diff --git a/cognite/client/utils/_retry.py b/cognite/client/utils/_retry.py index 5389cc08df..8a3205c935 100644 --- a/cognite/client/utils/_retry.py +++ b/cognite/client/utils/_retry.py @@ -1,7 +1,7 @@ from __future__ import annotations +from collections.abc import Iterator from random import uniform -from typing import Iterator class Backoff(Iterator[float]): diff 
--git a/cognite/client/utils/_text.py b/cognite/client/utils/_text.py index 10c108efc7..2166be7b44 100644 --- a/cognite/client/utils/_text.py +++ b/cognite/client/utils/_text.py @@ -3,8 +3,9 @@ import random import re import string +from collections.abc import Iterator from functools import lru_cache -from typing import Any, Iterator +from typing import Any from cognite.client.utils.useful_types import SequenceNotStr diff --git a/cognite/client/utils/_time.py b/cognite/client/utils/_time.py index 25bc9e6ce6..fd3a6bcecb 100644 --- a/cognite/client/utils/_time.py +++ b/cognite/client/utils/_time.py @@ -5,21 +5,17 @@ import math import numbers import re -import sys import time from abc import ABC, abstractmethod from contextlib import suppress from datetime import datetime, timedelta, timezone +from itertools import pairwise from typing import TYPE_CHECKING, cast, overload +from zoneinfo import ZoneInfo, ZoneInfoNotFoundError from cognite.client.utils._importing import local_import from cognite.client.utils._text import to_camel_case -if sys.version_info >= (3, 9): - from zoneinfo import ZoneInfo, ZoneInfoNotFoundError -else: - from backports.zoneinfo import ZoneInfo, ZoneInfoNotFoundError - if TYPE_CHECKING: from datetime import tzinfo @@ -75,7 +71,7 @@ @functools.lru_cache(1) def get_zoneinfo_utc() -> ZoneInfo: - return ZoneInfo("UTC") # type: ignore [abstract] + return ZoneInfo("UTC") def parse_str_timezone_offset(tz: str) -> timezone: @@ -96,7 +92,7 @@ def parse_str_timezone_offset(tz: str) -> timezone: def parse_str_timezone(tz: str) -> timezone | ZoneInfo: try: - return ZoneInfo(tz) # type: ignore [abstract] + return ZoneInfo(tz) except ZoneInfoNotFoundError: try: return parse_str_timezone_offset(tz) @@ -613,7 +609,7 @@ def _to_fixed_utc_intervals_variable_unit_length( "end": end.to_pydatetime().astimezone(UTC), "granularity": f"{_check_max_granularity_limit((end - start) // timedelta(hours=1), granularity)}h", } - for start, end in zip(index[:-1], index[1:]) + for start, end in pairwise(index) ] @@ -630,7 +626,7 @@ def _to_fixed_utc_intervals_fixed_unit_length( transitions = [] freq = multiplier * GRANULARITY_IN_HOURS[unit] hour, zero = pd.Timedelta(hours=1), pd.Timedelta(0) - for t_start, t_end in zip(transition_raw[:-1], transition_raw[1:]): + for t_start, t_end in pairwise(transition_raw): if t_start.dst() == t_end.dst(): dst_adjustment = 0 elif t_start.dst() == hour and t_end.dst() == zero: @@ -697,7 +693,7 @@ def _timezones_are_equal(start_tz: tzinfo, end_tz: tzinfo) -> bool: return True with suppress(ValueError, ZoneInfoNotFoundError): # ValueError is raised for non-conforming keys (ZoneInfoNotFoundError is self-explanatory) - if ZoneInfo(str(start_tz)) is ZoneInfo(str(end_tz)): # type: ignore [abstract] + if ZoneInfo(str(start_tz)) is ZoneInfo(str(end_tz)): return True return False @@ -717,7 +713,7 @@ def validate_timezone(start: datetime, end: datetime) -> ZoneInfo: pd = local_import("pandas") if isinstance(start, pd.Timestamp): - return ZoneInfo(str(start_tz)) # type: ignore [abstract] + return ZoneInfo(str(start_tz)) raise ValueError("Only tz-aware pandas.Timestamp and datetime (must be using ZoneInfo) are supported.") diff --git a/cognite/client/utils/_validation.py b/cognite/client/utils/_validation.py index 88e4c0fbe8..7c9a70e457 100644 --- a/cognite/client/utils/_validation.py +++ b/cognite/client/utils/_validation.py @@ -1,21 +1,20 @@ from __future__ import annotations import functools -from typing import Any, Callable, Literal, Mapping, Sequence, Tuple, Union - -from 
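[Reviewer note] Two of the stdlib simplifications in _time.py above, shown in isolation: zoneinfo drops the backports fallback (and with it the `type: ignore [abstract]` workaround), and itertools.pairwise, new in 3.10, replaces the zip(x[:-1], x[1:]) idiom:

from itertools import pairwise
from zoneinfo import ZoneInfo  # stdlib since 3.9, so no backports.zoneinfo branch

intervals = [0, 10, 20, 30]
print(list(pairwise(intervals)))  # [(0, 10), (10, 20), (20, 30)], same as zip(x[:-1], x[1:])
print(ZoneInfo("UTC"))            # UTC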
typing_extensions import TypeAlias +from collections.abc import Callable, Mapping, Sequence +from typing import Any, Literal, TypeAlias from cognite.client.data_classes._base import T_CogniteSort from cognite.client.utils._auxiliary import is_unlimited from cognite.client.utils._identifier import Identifier, IdentifierSequence from cognite.client.utils.useful_types import SequenceNotStr -SortSpec: TypeAlias = Union[ - T_CogniteSort, - str, - Tuple[str, Literal["asc", "desc"]], - Tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]], -] +SortSpec: TypeAlias = ( + T_CogniteSort + | str + | tuple[str, Literal["asc", "desc"]] + | tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]] +) def assert_type(var: Any, var_name: str, types: list[type], allow_none: bool = False) -> None: diff --git a/cognite/client/utils/useful_types.py b/cognite/client/utils/useful_types.py index 4b49040364..6adcf6905d 100644 --- a/cognite/client/utils/useful_types.py +++ b/cognite/client/utils/useful_types.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Any, Iterator, Protocol, Sequence, SupportsIndex, TypeVar, overload, runtime_checkable +from collections.abc import Iterator, Sequence +from typing import Any, Protocol, SupportsIndex, TypeVar, overload, runtime_checkable _T_co = TypeVar("_T_co", covariant=True) diff --git a/poetry.lock b/poetry.lock index e26c94c958..496f93db53 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,24 +2,13 @@ [[package]] name = "alabaster" -version = "0.7.13" -description = "A configurable sidebar-enabled Sphinx theme" +version = "0.7.16" +description = "A light, configurable Sphinx theme" optional = false -python-versions = ">=3.6" -files = [ - {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, -] - -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] [[package]] @@ -40,25 +29,6 @@ six = ">=1.12.0" astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = true -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", 
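[Reviewer note] The SortSpec rewrite just above combines the two typing changes made throughout this diff: TypeAlias imported directly from typing (available since 3.10) and PEP 604/585 syntax (X | Y, lowercase tuple) in place of Union and Tuple. Reduced to its essentials, with the T_CogniteSort member omitted:

from typing import Literal, TypeAlias

SortSpec: TypeAlias = (
    str
    | tuple[str, Literal["asc", "desc"]]
    | tuple[str, Literal["asc", "desc"], Literal["auto", "first", "last"]]
)

spec: SortSpec = ("created_time", "desc")  # a valid sort specification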
"pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - [[package]] name = "babel" version = "2.16.0" @@ -70,23 +40,9 @@ files = [ {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] -[package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} - [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -optional = false -python-versions = "*" -files = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] - [[package]] name = "backports-tarfile" version = "1.2.0" @@ -102,34 +58,6 @@ files = [ docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] -[[package]] -name = "backports-zoneinfo" -version = "0.2.1" -description = "Backport of the standard library zoneinfo module" -optional = false -python-versions = ">=3.6" -files = [ - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, - {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, -] - -[package.extras] -tzdata = ["tzdata"] - [[package]] name = "certifi" version = "2024.8.30" @@ -330,54 +258,6 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = true -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "click-plugins" -version = "1.1.1" -description = "An extension module for click to enable registering CLI commands via setuptools entry-points." -optional = true -python-versions = "*" -files = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] - -[package.dependencies] -click = ">=4.0" - -[package.extras] -dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] - -[[package]] -name = "cligj" -version = "0.7.2" -description = "Click params for commmand line interfaces to GeoJSON" -optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, <4" -files = [ - {file = "cligj-0.7.2-py3-none-any.whl", hash = "sha256:c1ca117dbce1fe20a5809dc96f01e1c2840f6dcc939b3ddbb1111bf330ba82df"}, - {file = "cligj-0.7.2.tar.gz", hash = "sha256:a4bc13d623356b373c2c27c53dbd9c68cae5d526270bfa71f6c6fa69669c6b27"}, -] - -[package.dependencies] -click = ">=4.0" - -[package.extras] -test = ["pytest-cov"] - [[package]] name = "colorama" version = "0.4.6" @@ -617,87 +497,21 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2. 
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] -[[package]] -name = "fiona" -version = "1.10.1" -description = "Fiona reads and writes spatial data files" -optional = true -python-versions = ">=3.8" -files = [ - {file = "fiona-1.10.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:6e2a94beebda24e5db8c3573fe36110d474d4a12fac0264a3e083c75e9d63829"}, - {file = "fiona-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc7366f99bdc18ec99441b9e50246fdf5e72923dc9cbb00267b2bf28edd142ba"}, - {file = "fiona-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c32f424b0641c79f4036b96c2e80322fb181b4e415c8cd02d182baef55e6730"}, - {file = "fiona-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:9a67bd88918e87d64168bc9c00d9816d8bb07353594b5ce6c57252979d5dc86e"}, - {file = "fiona-1.10.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:98fe556058b370da07a84f6537c286f87eb4af2343d155fbd3fba5d38ac17ed7"}, - {file = "fiona-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:be29044d4aeebae92944b738160dc5f9afc4cdf04f551d59e803c5b910e17520"}, - {file = "fiona-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94bd3d448f09f85439e4b77c38b9de1aebe3eef24acc72bd631f75171cdfde51"}, - {file = "fiona-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:30594c0cd8682c43fd01e7cdbe000f94540f8fa3b7cb5901e805c88c4ff2058b"}, - {file = "fiona-1.10.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:7338b8c68beb7934bde4ec9f49eb5044e5e484b92d940bc3ec27defdb2b06c67"}, - {file = "fiona-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8c77fcfd3cdb0d3c97237965f8c60d1696a64923deeeb2d0b9810286cbe25911"}, - {file = "fiona-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:537872cbc9bda7fcdf73851c91bc5338fca2b502c4c17049ccecaa13cde1f18f"}, - {file = "fiona-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:41cde2c52c614457e9094ea44b0d30483540789e62fe0fa758c2a2963e980817"}, - {file = "fiona-1.10.1-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:a00b05935c9900678b2ca660026b39efc4e4b916983915d595964eb381763ae7"}, - {file = "fiona-1.10.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f78b781d5bcbbeeddf1d52712f33458775dbb9fd1b2a39882c83618348dd730f"}, - {file = "fiona-1.10.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29ceeb38e3cd30d91d68858d0817a1bb0c4f96340d334db4b16a99edb0902d35"}, - {file = "fiona-1.10.1-cp313-cp313-win_amd64.whl", hash = "sha256:15751c90e29cee1e01fcfedf42ab85987e32f0b593cf98d88ed52199ef5ca623"}, - {file = "fiona-1.10.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:6f1242f872dc33d3b4269dcaebf1838a359f9097e1cc848b0e11367bce010e4d"}, - {file = "fiona-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:65308b7a7e57fcc533de8a5855b0fce798faabc736d1340192dd8673ff61bc4e"}, - {file = "fiona-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:632bc146355af5ff0d77e34ebd1be5072d623b4aedb754b94a3d8c356c4545ac"}, - {file = "fiona-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:b7b4c3c97b1d64a1b3321577e9edaebbd36b64006e278f225f300c497cc87c35"}, - {file = "fiona-1.10.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b62aa8d5a0981bd33d81c247219b1eaa1e655e0a0682b3a4759fccc40954bb30"}, - {file = 
"fiona-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f4b19cb5bd22443ef439b39239272349023556994242a8f953a0147684e1c47f"}, - {file = "fiona-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa7e7e5ad252ef29905384bf92e7d14dd5374584b525632652c2ab8925304670"}, - {file = "fiona-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:4e82d18acbe55230e9cf8ede2a836d99ea96b7c0cc7d2b8b993e6c9f0ac14dc2"}, - {file = "fiona-1.10.1.tar.gz", hash = "sha256:b00ae357669460c6491caba29c2022ff0acfcbde86a95361ea8ff5cd14a86b68"}, -] - -[package.dependencies] -attrs = ">=19.2.0" -certifi = "*" -click = ">=8.0,<9.0" -click-plugins = ">=1.0" -cligj = ">=0.5" -importlib-metadata = {version = "*", markers = "python_version < \"3.10\""} - -[package.extras] -all = ["fiona[calc,s3,test]"] -calc = ["pyparsing", "shapely"] -s3 = ["boto3 (>=1.3.1)"] -test = ["aiohttp", "fiona[s3]", "fsspec", "pytest (>=7)", "pytest-cov", "pytz"] - [[package]] name = "flake8" -version = "5.0.4" +version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.8.1" files = [ - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, + {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, + {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.9.0,<2.10.0" -pyflakes = ">=2.5.0,<2.6.0" - -[[package]] -name = "geopandas" -version = "0.13.2" -description = "Geographic pandas extensions" -optional = true -python-versions = ">=3.8" -files = [ - {file = "geopandas-0.13.2-py3-none-any.whl", hash = "sha256:101cfd0de54bcf9e287a55b5ea17ebe0db53a5e25a28bacf100143d0507cabd9"}, - {file = "geopandas-0.13.2.tar.gz", hash = "sha256:e5b56d9c20800c77bcc0c914db3f27447a37b23b2cd892be543f5001a694a968"}, -] - -[package.dependencies] -fiona = ">=1.8.19" -packaging = "*" -pandas = ">=1.1.0" -pyproj = ">=3.0.1" -shapely = ">=1.7.1" +pycodestyle = ">=2.12.0,<2.13.0" +pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "geopandas" @@ -722,17 +536,6 @@ shapely = ">=2.0.0" all = ["GeoAlchemy2", "SQLAlchemy (>=1.3)", "folium", "geopy", "mapclassify", "matplotlib (>=3.5.0)", "psycopg-binary (>=3.1.0)", "pyarrow (>=8.0.0)", "xyzservices"] dev = ["black", "codecov", "pre-commit", "pytest (>=3.1.0)", "pytest-cov", "pytest-xdist"] -[[package]] -name = "graphlib-backport" -version = "1.1.0" -description = "Backport of the Python 3.9 graphlib module for Python 3.6+" -optional = false -python-versions = ">=3.6,<4.0" -files = [ - {file = "graphlib_backport-1.1.0-py3-none-any.whl", hash = "sha256:eccacf9f2126cdf89ce32a6018c88e1ecd3e4898a07568add6e1907a439055ba"}, - {file = "graphlib_backport-1.1.0.tar.gz", hash = "sha256:00a7888b21e5393064a133209cb5d3b3ef0a2096cf023914c9d778dff5644125"}, -] - [[package]] name = "icdiff" version = "2.0.7" @@ -806,28 +609,6 @@ perf = ["ipython"] test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] -[[package]] -name = "importlib-resources" -version = "6.4.5" -description = "Read resources from Python packages" -optional = false 
-python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"}, - {file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] -type = ["pytest-mypy"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -841,42 +622,41 @@ files = [ [[package]] name = "ipython" -version = "8.12.3" +version = "8.27.0" description = "IPython: Productive Interactive Computing" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" files = [ - {file = "ipython-8.12.3-py3-none-any.whl", hash = "sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, - {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, + {file = "ipython-8.27.0-py3-none-any.whl", hash = "sha256:f68b3cb8bde357a5d7adc9598d57e22a45dfbea19eb6b98286fa3b288c9cd55c"}, + {file = "ipython-8.27.0.tar.gz", hash = "sha256:0b99a2dc9f15fd68692e898e5568725c6d49c527d36a9fb5960ffbdeaa82ff7e"}, ] [package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -pickleshare = "*" -prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt-toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" stack-data = "*" -traitlets = ">=5" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} +traitlets = ">=5.13.0" +typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} [package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] kernel = ["ipykernel"] +matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", 
"pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] [[package]] name = "jaraco-classes" @@ -1000,7 +780,6 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} "jaraco.classes" = "*" "jaraco.context" = "*" "jaraco.functools" = "*" @@ -1287,43 +1066,6 @@ files = [ {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[[package]] -name = "numpy" -version = "1.24.4" -description = "Fundamental package for array computing in Python" -optional = true -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, - {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, - {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, - {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, - {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, - {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, - {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, - {file = 
"numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, - {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, - {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, - {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, -] - [[package]] name = "numpy" version = "1.26.4" @@ -1396,69 +1138,6 @@ files = [ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] -[[package]] -name = "pandas" -version = "2.0.3" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, - {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, - {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, - {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, - {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = 
"sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, - {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, - {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, - {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, - {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, - {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, - {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, -] - -[package.dependencies] -numpy = {version = ">=1.20.3", markers = "python_version < \"3.10\""} -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.1" - -[package.extras] -all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] -aws = ["s3fs (>=2021.08.0)"] -clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] -compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] -computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2021.07.0)"] -gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] -hdf5 = 
["tables (>=3.6.1)"] -html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] -mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] -spss = ["pyreadstat (>=1.1.2)"] -sql-other = ["SQLAlchemy (>=1.4.16)"] -test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.6.3)"] - [[package]] name = "pandas" version = "2.2.3" @@ -1588,17 +1267,6 @@ files = [ [package.dependencies] ptyprocess = ">=0.5" -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -optional = false -python-versions = "*" -files = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] - [[package]] name = "pip" version = "24.2" @@ -1668,13 +1336,13 @@ files = [ [[package]] name = "pre-commit" -version = "3.5.0" +version = "3.8.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, - {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, + {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, + {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, ] [package.dependencies] @@ -1745,13 +1413,13 @@ tests = ["pytest"] [[package]] name = "pycodestyle" -version = "2.9.1" +version = "2.12.1" description = "Python style guide checker" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] @@ -1767,13 +1435,13 @@ files = [ [[package]] name = "pyflakes" -version = "2.5.0" +version = "3.2.0" description = "passive checker of Python programs" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] 
[[package]]
@@ -1872,53 +1540,6 @@ dev = ["cython"]
geopandas = ["geopandas"]
test = ["pytest", "pytest-cov"]

-[[package]]
-name = "pyproj"
-version = "3.5.0"
-description = "Python interface to PROJ (cartographic projections and coordinate transformations library)"
-optional = true
-python-versions = ">=3.8"
-files = [
- {file = "pyproj-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6475ce653880938468a1a1b7321267243909e34b972ba9e53d5982c41d555918"},
- {file = "pyproj-3.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61e4ad57d89b03a7b173793b31bca8ee110112cde1937ef0f42a70b9120c827d"},
- {file = "pyproj-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bdd2021bb6f7f346bfe1d2a358aa109da017d22c4704af2d994e7c7ee0a7a53"},
- {file = "pyproj-3.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5674923351e76222e2c10c58b5e1ac119d7a46b270d822c463035971b06f724b"},
- {file = "pyproj-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd5e2b6aa255023c4acd0b977590f1f7cc801ba21b4d806fcf6dfac3474ebb83"},
- {file = "pyproj-3.5.0-cp310-cp310-win32.whl", hash = "sha256:6f316a66031a14e9c5a88c91f8b77aa97f5454895674541ed6ab630b682be35d"},
- {file = "pyproj-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:f7c2f4d9681e810cf40239caaca00079930a6d9ee6591139b88d592d36051d82"},
- {file = "pyproj-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7572983134e310e0ca809c63f1722557a040fe9443df5f247bf11ba887eb1229"},
- {file = "pyproj-3.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eccb417b91d0be27805dfc97550bfb8b7db94e9fe1db5ebedb98f5b88d601323"},
- {file = "pyproj-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:621d78a9d8bf4d06e08bef2471021fbcb1a65aa629ad4a20c22e521ce729cc20"},
- {file = "pyproj-3.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9a024370e917c899bff9171f03ea6079deecdc7482a146a2c565f3b9df134ea"},
- {file = "pyproj-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b7c2113c4d11184a238077ec85e31eda1dcc58ffeb9a4429830e0a7036e787d"},
- {file = "pyproj-3.5.0-cp311-cp311-win32.whl", hash = "sha256:a730f5b4c98c8a0f312437873e6e34dbd4cc6dc23d5afd91a6691c62724b1f68"},
- {file = "pyproj-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:e97573de0ab3bbbcb4c7748bc41f4ceb6da10b45d35b1a294b5820701e7c25f0"},
- {file = "pyproj-3.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2b708fd43453b985642b737d4a6e7f1d6a0ab1677ffa4e14cc258537b49224b0"},
- {file = "pyproj-3.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b60d93a200639e8367c6542a964fd0aa2dbd152f256c1831dc18cd5aa470fb8a"},
- {file = "pyproj-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38862fe07316ae12b79d82d298e390973a4f00b684f3c2d037238e20e00610ba"},
- {file = "pyproj-3.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71b65f2a38cd9e16883dbb0f8ae82bdf8f6b79b1b02975c78483ab8428dbbf2f"},
- {file = "pyproj-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b752b7d9c4b08181c7e8c0d9c7f277cbefff42227f34d3310696a87c863d9dd3"},
- {file = "pyproj-3.5.0-cp38-cp38-win32.whl", hash = "sha256:b937215bfbaf404ec8f03ca741fc3f9f2c4c2c5590a02ccddddd820ae3c71331"},
- {file = "pyproj-3.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:97ed199033c2c770e7eea2ef80ff5e6413426ec2d7ec985b869792f04ab95d05"},
- {file = "pyproj-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:052c49fce8b5d55943a35c36ccecb87350c68b48ba95bc02a789770c374ef819"},
- {file = "pyproj-3.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1507138ea28bf2134d31797675380791cc1a7156a3aeda484e65a78a4aba9b62"},
- {file = "pyproj-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02742ef3d846401861a878a61ef7ad911ea7539d6cc4619ddb52dbdf7b45aee"},
- {file = "pyproj-3.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:385b0341861d3ebc8cad98337a738821dcb548d465576527399f4955ca24b6ed"},
- {file = "pyproj-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fe6bb1b68a35d07378d38be77b5b2f8dd2bea5910c957bfcc7bee55988d3910"},
- {file = "pyproj-3.5.0-cp39-cp39-win32.whl", hash = "sha256:5c4b85ac10d733c42d73a2e6261c8d6745bf52433a31848dd1b6561c9a382da3"},
- {file = "pyproj-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:1798ff7d65d9057ebb2d017ffe8403268b8452f24d0428b2140018c25c7fa1bc"},
- {file = "pyproj-3.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d711517a8487ef3245b08dc82f781a906df9abb3b6cb0ce0486f0eeb823ca570"},
- {file = "pyproj-3.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:788a5dadb532644a64efe0f5f01bf508c821eb7e984f13a677d56002f1e8a67a"},
- {file = "pyproj-3.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73f7960a97225812f9b1d7aeda5fb83812f38de9441e3476fcc8abb3e2b2f4de"},
- {file = "pyproj-3.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fde5ece4d2436b5a57c8f5f97b49b5de06a856d03959f836c957d3e609f2de7e"},
- {file = "pyproj-3.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e08db25b61cf024648d55973cc3d1c3f1d0818fabf594d5f5a8e2318103d2aa0"},
- {file = "pyproj-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a87b419a2a352413fbf759ecb66da9da50bd19861c8f26db6a25439125b27b9"},
- {file = "pyproj-3.5.0.tar.gz", hash = "sha256:9859d1591c1863414d875ae0759e72c2cffc01ab989dc64137fbac572cc81bf6"},
-]
-
-[package.dependencies]
-certifi = "*"
-
[[package]]
name = "pyproj"
version = "3.6.1"
@@ -2099,7 +1720,7 @@ cli = ["click (>=5.0)"]
name = "pytz"
version = "2024.2"
description = "World timezone definitions, modern and historical"
-optional = false
+optional = true
python-versions = "*"
files = [
{file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
@@ -2298,7 +1919,6 @@ files = [
[package.dependencies]
markdown-it-py = ">=2.2.0"
pygments = ">=2.13.0,<3.0.0"
-typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}

[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
@@ -2415,7 +2035,6 @@ babel = ">=2.9"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
docutils = ">=0.14,<0.20"
imagesize = ">=1.3"
-importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""}
Jinja2 = ">=3.0"
packaging = ">=21.0"
Pygments = ">=2.12"
@@ -2454,47 +2073,50 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"]
[[package]]
name = "sphinxcontrib-applehelp"
-version = "1.0.4"
+version = "2.0.0"
description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"},
- {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"},
+ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"},
+ {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"},
]

[package.extras]
-lint = ["docutils-stubs", "flake8", "mypy"]
+lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
+standalone = ["Sphinx (>=5)"]
test = ["pytest"]

[[package]]
name = "sphinxcontrib-devhelp"
-version = "1.0.2"
-description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
+version = "2.0.0"
+description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.9"
files = [
- {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"},
- {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"},
+ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"},
+ {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"},
]

[package.extras]
-lint = ["docutils-stubs", "flake8", "mypy"]
+lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
+standalone = ["Sphinx (>=5)"]
test = ["pytest"]

[[package]]
name = "sphinxcontrib-htmlhelp"
-version = "2.0.1"
+version = "2.1.0"
description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"},
- {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"},
+ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"},
+ {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"},
]

[package.extras]
-lint = ["docutils-stubs", "flake8", "mypy"]
+lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
+standalone = ["Sphinx (>=5)"]
test = ["html5lib", "pytest"]

[[package]]
@@ -2527,32 +2149,34 @@ test = ["flake8", "mypy", "pytest"]
[[package]]
name = "sphinxcontrib-qthelp"
-version = "1.0.3"
-description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document."
+version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" files = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] @@ -2809,16 +2433,16 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", type = ["pytest-mypy"] [extras] -all = ["PyYAML", "geopandas", "geopandas", "numpy", "numpy", "numpy", "pandas", "pandas", "pip", "shapely", "sympy"] +all = ["PyYAML", "geopandas", "numpy", "numpy", "pandas", "pip", "shapely", "sympy"] functions = ["pip"] -geo = ["geopandas", "geopandas", "shapely"] -numpy = ["numpy", "numpy", "numpy"] -pandas = ["pandas", "pandas"] +geo = ["geopandas", "shapely"] +numpy = ["numpy", "numpy"] +pandas = ["pandas"] pyodide = ["pyodide-http", "tzdata"] sympy = ["sympy"] yaml = ["PyYAML"] [metadata] lock-version = "2.0" -python-versions = "^3.8" -content-hash = "0b2715ff65cbb818ab2f2bb5b4b0051bbe91019998e202fdc2399d643c7f2608" +python-versions = "^3.10" +content-hash = "e212ee1ce399963d8c834d166a38105333d0713f9d95cebaffe73088984512b2" diff --git a/pyproject.toml b/pyproject.toml index a6a11b2cb8..c04303b813 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ packages = [{ include="cognite", from="." }] "scripts/*" = ["T201"] [tool.poetry.dependencies] -python = "^3.8" +python = "^3.10" requests = "^2.27" requests_oauthlib = "^1" @@ -31,19 +31,12 @@ backports-zoneinfo = { version = ">=0.2.1", python = "<3.9" } # Windows does not have a ANSI database and need tzdata... 
pyodide also needs it: tzdata = { version = ">=2024.1", markers = "platform_system == 'Windows' or platform_system == 'Emscripten'" } numpy = [ - { version = ">=1.20, <1.25", python = "~3.8", optional = true }, - { version = "^1.25", python = ">=3.9, <3.12", optional = true }, + { version = "^1.25", python = ">=3.10, <3.12", optional = true }, { version = "^1.26", python = "^3.12", optional = true }, ] sympy = { version = "*", optional = true } -pandas = [ - { version = ">=1.4, <2.1", python = "~3.8", optional = true }, - { version = ">=2.1", python = ">=3.9", optional = true }, -] -geopandas = [ - { version = ">=0.10, <0.14", python = "~3.8", optional = true }, - { version = ">=0.14", python = ">=3.9", optional = true }, -] +pandas = { version = ">=2.1", optional = true } +geopandas = { version = ">=0.14", optional = true } shapely = { version = ">=1.7.0", optional = true } pyodide-http = { version = "^0.2.1", optional = true } graphlib-backport = { version = "^1.0.0", python = "<3.9" } diff --git a/scripts/create_ts_for_integration_tests.py b/scripts/create_ts_for_integration_tests.py index d4af1b952a..5c3c889f39 100644 --- a/scripts/create_ts_for_integration_tests.py +++ b/scripts/create_ts_for_integration_tests.py @@ -1,6 +1,5 @@ import random import time -from typing import List, Tuple import numpy as np import pandas as pd @@ -108,7 +107,7 @@ def create_edge_case_if_not_exists(ts_api): create_if_not_exists(ts_api, ts_lst, [ts_dps.to_pandas(column_names="external_id", include_aggregate_name=False)]) -def create_dense_time_series() -> Tuple[List[TimeSeries], List[pd.DataFrame]]: +def create_dense_time_series() -> tuple[list[TimeSeries], list[pd.DataFrame]]: ts_add = (ts_lst := []).append df_add = (df_lst := []).append ts_add(TimeSeries(name=NAMES[0], external_id=NAMES[0], is_string=False, metadata={"offset": 1, "delta": 10})) @@ -326,7 +325,7 @@ def get_bad(): ) -def create_if_not_exists(ts_api: TimeSeriesAPI, ts_list: List[TimeSeries], df_lst: List[pd.DataFrame]) -> None: +def create_if_not_exists(ts_api: TimeSeriesAPI, ts_list: list[TimeSeries], df_lst: list[pd.DataFrame]) -> None: existing = { t.external_id for t in ts_api.retrieve_multiple(external_ids=[t.external_id for t in ts_list], ignore_unknown_ids=True) @@ -352,7 +351,7 @@ def create_if_not_exists(ts_api: TimeSeriesAPI, ts_list: List[TimeSeries], df_ls print(f"Inserted {inserted} series of datapoints") -def create_time_series(ts_api, ts_lst: List[TimeSeries], df_lst: List[pd.DataFrame]): +def create_time_series(ts_api, ts_lst: list[TimeSeries], df_lst: list[pd.DataFrame]): ts_api.create(ts_lst) print(f"Created {len(ts_lst)} ts") time.sleep(5) diff --git a/scripts/custom_checks/docstrings.py b/scripts/custom_checks/docstrings.py index 6d2759a6a1..3f023f1b22 100644 --- a/scripts/custom_checks/docstrings.py +++ b/scripts/custom_checks/docstrings.py @@ -132,12 +132,6 @@ def _extract_yields_return_annot(string): return "".join(letters) def _extract_annotations(self, method): - def fix_literal(string): - # Example: Union[Literal[('aaa', 'bbb')]] -> Union[Literal["aaa", "bbb"]] - if match := re.search(r"Literal\[(\((.*)\))\]", string): - return string.replace(match.group(1), match.group(2).replace("'", '"')) - return string - annots = {} if isinstance(method, property): method_signature = inspect.signature(lambda: ...) 
# just 'self' anyways @@ -160,7 +154,7 @@ def fix_literal(string): var_name = "*" + var_name elif param.kind is param.VAR_KEYWORD: var_name = "**" + var_name - annots[var_name] = fix_literal(str(param.annotation)) + annots[var_name] = str(param.annotation) return annots, return_annot diff --git a/scripts/custom_checks/version.py b/scripts/custom_checks/version.py index 6f01ffbe61..a38505b0a3 100644 --- a/scripts/custom_checks/version.py +++ b/scripts/custom_checks/version.py @@ -1,7 +1,9 @@ import re +from collections.abc import Iterator from datetime import datetime +from itertools import pairwise from pathlib import Path -from typing import Iterator, Match, Optional +from re import Match import toml from packaging.version import Version @@ -11,7 +13,7 @@ CWD = Path.cwd() -def pyproj_version_matches() -> Optional[str]: +def pyproj_version_matches() -> str | None: with (CWD / "pyproject.toml").open() as fh: version_in_pyproject = toml.load(fh)["tool"]["poetry"]["version"] @@ -28,7 +30,7 @@ def _parse_changelog() -> Iterator[Match[str]]: return re.finditer(r"##\s\[(\d+\.\d+\.\d+)\]\s-\s(\d+-\d+-\d+)", changelog) -def changelog_entry_version_matches() -> Optional[str]: +def changelog_entry_version_matches() -> str | None: match = next(_parse_changelog()) version = match.group(1) if version != __version__: @@ -40,7 +42,7 @@ def changelog_entry_version_matches() -> Optional[str]: return None -def changelog_entry_date() -> Optional[str]: +def changelog_entry_date() -> str | None: match = next(_parse_changelog()) try: datetime.strptime(date := match.group(2), "%Y-%m-%d") @@ -49,9 +51,9 @@ def changelog_entry_date() -> Optional[str]: return f"Date given in the newest entry in 'CHANGELOG.md', {date!r}, is not valid/parsable (YYYY-MM-DD)" -def version_number_is_increasing() -> Optional[str]: +def version_number_is_increasing() -> str | None: versions = [Version(match.group(1)) for match in _parse_changelog()] - for new, old in zip(versions[:-1], versions[1:]): + for new, old in pairwise(versions): if new < old: return f"Versions must be strictly increasing: {new} is not higher than the previous, {old}." 
    return None
diff --git a/tests/tests_integration/test_api/test_datapoints.py b/tests/tests_integration/test_api/test_datapoints.py
index 2822c69cb6..56c2a7e09b 100644
--- a/tests/tests_integration/test_api/test_datapoints.py
+++ b/tests/tests_integration/test_api/test_datapoints.py
@@ -12,9 +12,10 @@
import random
import re
import unittest
+from collections.abc import Callable, Iterator
from contextlib import nullcontext as does_not_raise
from datetime import datetime, timezone
-from typing import Callable, Iterator, Literal
+from typing import Literal
from unittest.mock import patch

import numpy as np
diff --git a/tests/tests_unit/test_api/test_data_modeling/conftest.py b/tests/tests_unit/test_api/test_data_modeling/conftest.py
index 3aa3a7fbd5..2e08cb8662 100644
--- a/tests/tests_unit/test_api/test_data_modeling/conftest.py
+++ b/tests/tests_unit/test_api/test_data_modeling/conftest.py
@@ -1,9 +1,7 @@
-from typing import Optional
-
from cognite.client.data_classes.data_modeling import View


-def make_test_view(space: str, external_id: str, version: Optional[str], created_time: int = 1):
+def make_test_view(space: str, external_id: str, version: str | None, created_time: int = 1):
    return View(
        space,
        external_id,
diff --git a/tests/tests_unit/test_base.py b/tests/tests_unit/test_base.py
index f17d137829..03b411e19b 100644
--- a/tests/tests_unit/test_base.py
+++ b/tests/tests_unit/test_base.py
@@ -1,9 +1,10 @@
from __future__ import annotations

+from collections.abc import Callable
from copy import deepcopy
from decimal import Decimal
from inspect import signature
-from typing import Any, Callable
+from typing import Any
from unittest.mock import MagicMock

import pytest
diff --git a/tests/tests_unit/test_data_classes/test_data_models/test_aggregations.py b/tests/tests_unit/test_data_classes/test_data_models/test_aggregations.py
index e49ae49004..c14fd107f7 100644
--- a/tests/tests_unit/test_data_classes/test_data_models/test_aggregations.py
+++ b/tests/tests_unit/test_data_classes/test_data_models/test_aggregations.py
@@ -1,4 +1,4 @@
-from typing import Iterator
+from collections.abc import Iterator

import pytest
from _pytest.mark import ParameterSet
diff --git a/tests/tests_unit/test_data_classes/test_data_models/test_filters.py b/tests/tests_unit/test_data_classes/test_data_models/test_filters.py
index e747a8368b..91dfd6ee36 100644
--- a/tests/tests_unit/test_data_classes/test_data_models/test_filters.py
+++ b/tests/tests_unit/test_data_classes/test_data_models/test_filters.py
@@ -1,6 +1,7 @@
from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Iterator, Literal
+from collections.abc import Iterator
+from typing import TYPE_CHECKING, Any, Literal

import pytest
from _pytest.mark import ParameterSet
diff --git a/tests/tests_unit/test_data_classes/test_data_models/test_instances.py b/tests/tests_unit/test_data_classes/test_data_models/test_instances.py
index 3f62f9ce33..e2a321def3 100644
--- a/tests/tests_unit/test_data_classes/test_data_models/test_instances.py
+++ b/tests/tests_unit/test_data_classes/test_data_models/test_instances.py
@@ -1,7 +1,7 @@
from __future__ import annotations

from datetime import date, datetime
-from typing import Any, List, Union, cast
+from typing import Any, cast

import pytest
@@ -115,7 +115,7 @@ def test_dump_and_load(self) -> None:
                start_time=datetime.fromisoformat("2021-01-01T00:00:00"),
                end_time=datetime.fromisoformat("2021-01-01T00:00:00"),
                other_nodes=cast(
-                    List[Union[NodeId, DirectRelationReference]],
+                    list[NodeId | DirectRelationReference],
                    [
                        DirectRelationReference("space", "external_id"),
                        NodeId("space", "external_id2"),
diff --git a/tests/tests_unit/test_data_classes/test_data_models/test_queries.py b/tests/tests_unit/test_data_classes/test_data_models/test_queries.py
index 8bb07933c8..7c57da0c7a 100644
--- a/tests/tests_unit/test_data_classes/test_data_models/test_queries.py
+++ b/tests/tests_unit/test_data_classes/test_data_models/test_queries.py
@@ -1,4 +1,5 @@
-from typing import Any, Dict, Iterator
+from collections.abc import Iterator
+from typing import Any

import pytest
from _pytest.mark import ParameterSet
@@ -98,7 +99,7 @@ def test_dump(self, raw_data: dict, loaded: q.ResultSetExpression) -> None:


def select_load_and_dump_equals_data() -> Iterator[ParameterSet]:
-    raw: Dict[str, Any] = {}
+    raw: dict[str, Any] = {}
    loaded = q.Select()
    yield pytest.param(raw, loaded, id="Empty")
@@ -233,5 +234,5 @@ def query_load_yaml_data() -> Iterator[ParameterSet]:
class TestQuery:
    @pytest.mark.parametrize("raw_data, expected", list(query_load_yaml_data()))
    def test_load_yaml(self, raw_data: str, expected: q.Query) -> None:
-        actual = q.Query.load_yaml(raw_data)
+        actual = q.Query.load(raw_data)
        assert actual.dump(camel_case=True) == expected.dump(camel_case=True)
diff --git a/tests/tests_unit/test_data_classes/test_functions.py b/tests/tests_unit/test_data_classes/test_functions.py
index bec965a376..67be7452cd 100644
--- a/tests/tests_unit/test_data_classes/test_functions.py
+++ b/tests/tests_unit/test_data_classes/test_functions.py
@@ -1,6 +1,5 @@
import datetime
import re
-from typing import List, Tuple

import pytest
@@ -90,7 +89,7 @@ def test_get_function_call_with_filter(self, cognite_client, mock_function_call_

class TestFunctionCallLog:
    @pytest.fixture(scope="class")
-    def entries(self) -> List[Tuple[datetime.datetime, str]]:
+    def entries(self) -> list[tuple[datetime.datetime, str]]:
        start_ts = datetime.datetime(2023, 10, 4, 10, 30, 4, 123000, tzinfo=datetime.timezone.utc)
        ms_delta = datetime.timedelta(milliseconds=100)
        return [(start_ts + i * ms_delta, f"line {i}") for i in range(10)]
diff --git a/tests/tests_unit/test_http_client.py b/tests/tests_unit/test_http_client.py
index 03e674a21c..cff2f852fe 100644
--- a/tests/tests_unit/test_http_client.py
+++ b/tests/tests_unit/test_http_client.py
@@ -1,4 +1,3 @@
-import socket
from unittest.mock import MagicMock

import pytest
@@ -93,7 +92,7 @@ def test_read_timeout_errors(self):
            retry_tracker_factory=lambda _: retry_tracker,
            session=MagicMock(
                request=MagicMock(
-                    side_effect=lambda *args, **kwargs: raise_exception_wrapped_as_in_requests_lib(socket.timeout())
+                    side_effect=lambda *args, **kwargs: raise_exception_wrapped_as_in_requests_lib(TimeoutError())
                )
            ),
        )
diff --git a/tests/tests_unit/test_utils/test_time.py b/tests/tests_unit/test_utils/test_time.py
index 34ae4069cd..44d4ea156c 100644
--- a/tests/tests_unit/test_utils/test_time.py
+++ b/tests/tests_unit/test_utils/test_time.py
@@ -3,8 +3,9 @@
import platform
import re
import time
+from collections.abc import Iterable
from datetime import datetime, timedelta, timezone
-from typing import TYPE_CHECKING, Iterable
+from typing import TYPE_CHECKING
from unittest import mock

import pytest
diff --git a/tests/utils.py b/tests/utils.py
index 46b0c575b6..1189eccd44 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -2,6 +2,7 @@

import abc
import collections.abc
+import dataclasses
import enum
import gzip
import importlib
@@ -10,13 +11,13 @@
import os
import random
import string
-import sys
import typing
-from collections import Counter
+from collections.abc import Mapping
from contextlib import contextmanager
from datetime import timedelta, timezone
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Literal, Mapping, TypeVar, cast, get_args, get_origin, get_type_hints
+from types import UnionType
+from typing import TYPE_CHECKING, Any, Literal, TypeVar, cast, get_args, get_origin, get_type_hints

from cognite.client import CogniteClient
from cognite.client._constants import MAX_VALID_INTERNAL_ID
@@ -61,11 +62,7 @@
T_Type = TypeVar("T_Type", bound=type)

-UNION_TYPES = {typing.Union}
-if sys.version_info >= (3, 10):
-    from types import UnionType
-
-    UNION_TYPES.add(UnionType)
+UNION_TYPES = {typing.Union, UnionType}


def all_subclasses(base: T_Type) -> list[T_Type]:
@@ -472,23 +469,22 @@ def create_value(self, type_: Any, var_name: str | None = None) -> Any:
        elif container_type is typing.Literal:
            return self._random.choice(args)
        elif container_type in [
-            typing.List,
            list,
            typing.Sequence,
            collections.abc.Sequence,
            collections.abc.Collection,
        ]:
            return [self.create_value(first_not_none) for _ in range(3)]
-        elif container_type in [typing.Dict, dict, collections.abc.MutableMapping, collections.abc.Mapping]:
+        elif container_type in [dict, collections.abc.MutableMapping, collections.abc.Mapping]:
            if first_not_none is None:
                return self.create_value(dict)
            key_type, value_type = args
            return {
                self.create_value(key_type): self.create_value(value_type) for _ in range(self._random.randint(1, 3))
            }
-        elif container_type in [typing.Set, set]:
+        elif container_type is set:
            return set(self.create_value(first_not_none) for _ in range(self._random.randint(1, 3)))
-        elif container_type in [typing.Tuple, tuple]:
+        elif container_type is tuple:
            if any(arg is ... for arg in args):
                return tuple(self.create_value(first_not_none) for _ in range(self._random.randint(1, 3)))
            raise NotImplementedError(f"Tuple with multiple types is not supported. {self._error_msg}")
@@ -542,8 +538,10 @@ def create_value(self, type_: Any, var_name: str | None = None) -> Any:
            return type_([self.create_value(type_._RESOURCE) for _ in range(self._random.randint(1, 3))])
        elif inspect.isclass(type_):
            return self.create_instance(type_)
+        elif type(type_) is dataclasses.InitVar:
+            return self.create_value(type_.type)

-        raise NotImplementedError(f"Unsupported container type {container_type}. {self._error_msg}")
+        raise NotImplementedError(f"Unsupported {type_=} or {container_type=}. {self._error_msg}")

    def _random_string(
        self,
@@ -596,63 +594,5 @@ def _create_type_hint_3_10(
            return eval(annotation, resource_module_vars, local_vars)
        except TypeError:
            # Python 3.10 Type Hint
-            return cls._type_hint_3_10_to_8(annotation, resource_module_vars, local_vars)
-
-    @classmethod
-    def _type_hint_3_10_to_8(
-        cls, annotation: str, resource_module_vars: dict[str, Any], local_vars: dict[str, Any]
-    ) -> Any:
-        if cls._is_vertical_union(annotation):
-            alternatives = [
-                cls._create_type_hint_3_10(a.strip(), resource_module_vars, local_vars) for a in annotation.split("|")
-            ]
-            return typing.Union[tuple(alternatives)]
-        elif annotation.startswith("dict[") and annotation.endswith("]"):
-            if Counter(annotation)[","] > 1:
-                key, rest = annotation[5:-1].split(",", 1)
-                return typing.Dict[
-                    key.strip(), cls._create_type_hint_3_10(rest.strip(), resource_module_vars, local_vars)
-                ]
-            key, value = annotation[5:-1].split(",")
-            return typing.Dict[
-                cls._create_type_hint_3_10(key.strip(), resource_module_vars, local_vars),
-                cls._create_type_hint_3_10(value.strip(), resource_module_vars, local_vars),
-            ]
-        elif annotation.startswith("Mapping[") and annotation.endswith("]"):
-            if Counter(annotation)[","] > 1:
-                key, rest = annotation[8:-1].split(",", 1)
-                return typing.Mapping[
-                    key.strip(), cls._create_type_hint_3_10(rest.strip(), resource_module_vars, local_vars)
-                ]
-            key, value = annotation[8:-1].split(",")
-            return typing.Mapping[
-                cls._create_type_hint_3_10(key.strip(), resource_module_vars, local_vars),
-                cls._create_type_hint_3_10(value.strip(), resource_module_vars, local_vars),
-            ]
-        elif annotation.startswith("Optional[") and annotation.endswith("]"):
-            return typing.Optional[cls._create_type_hint_3_10(annotation[9:-1], resource_module_vars, local_vars)]
-        elif annotation.startswith("list[") and annotation.endswith("]"):
-            return typing.List[cls._create_type_hint_3_10(annotation[5:-1], resource_module_vars, local_vars)]
-        elif annotation.startswith("tuple[") and annotation.endswith("]"):
-            return typing.Tuple[cls._create_type_hint_3_10(annotation[6:-1], resource_module_vars, local_vars)]
-        elif annotation.startswith("set[") and annotation.endswith("]"):
-            return typing.Set[cls._create_type_hint_3_10(annotation[4:-1], resource_module_vars, local_vars)]
-        elif annotation.startswith("typing.Sequence[") and annotation.endswith("]"):
-            # This is used in the Sequence data class file to avoid name collision
-            return typing.Sequence[cls._create_type_hint_3_10(annotation[16:-1], resource_module_vars, local_vars)]
-        elif annotation.startswith("Sequence[") and annotation.endswith("]"):
-            return typing.Sequence[cls._create_type_hint_3_10(annotation[9:-1], resource_module_vars, local_vars)]
-        elif annotation.startswith("Collection[") and annotation.endswith("]"):
-            return typing.Collection[cls._create_type_hint_3_10(annotation[11:-1], resource_module_vars, local_vars)]
-        raise NotImplementedError(f"Unsupported conversion of type hint {annotation!r}. {cls._error_msg}")
-
-    @classmethod
-    def _is_vertical_union(cls, annotation: str) -> bool:
-        if "|" not in annotation:
-            return False
-        parts = [p.strip() for p in annotation.split("|")]
-        for part in parts:
-            counts = Counter(part)
-            if counts["["] != counts["]"]:
-                return False
-        return True
+            if annotation.startswith("Sequence[") and annotation.endswith("]"):
+                return typing.Sequence[cls._create_type_hint_3_10(annotation[9:-1], resource_module_vars, local_vars)]