From 2c3e8923fa38a14dff780e3a76f088d79b2ac59a Mon Sep 17 00:00:00 2001 From: Anders Albert <60234212+doctrino@users.noreply.github.com> Date: Tue, 5 Dec 2023 09:23:18 +0100 Subject: [PATCH 01/90] Skip dataset validation if --dry-run is set (#195) * fix: skip dataset validation if --dry-run is set * build: changelog * tests: updated test --- CHANGELOG.cdf-tk.md | 5 +++++ cognite_toolkit/cdf_tk/load.py | 30 +++++++++++++++++------------- tests/test_cdf_tk/test_load.py | 2 +- 3 files changed, 23 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md index a1852472c..d4955d410 100644 --- a/CHANGELOG.cdf-tk.md +++ b/CHANGELOG.cdf-tk.md @@ -15,6 +15,11 @@ Changes are grouped as follows: - `Fixed` for any bug fixes. - `Security` in case of vulnerabilities. +## [TBD] - 2023-12-TBD +### Fixed +- When running `cdf-tk deploy` with `--dry-run` a `ValueError` was raised if not all datasets were pre-existing. + This is now fixed by skipping dataset validation when running with `--dry-run`. + ## [0.1.0a3] - 2023-12-01 ### Changed diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index a48b86a5b..d5a14c153 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -232,7 +232,7 @@ def delete(self, ids: Sequence[T_ID]) -> int: def retrieve(self, ids: Sequence[T_ID]) -> T_ResourceList: return self.api_class.retrieve(ids) - def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig) -> T_Resource | T_ResourceList: + def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> T_Resource | T_ResourceList: raw_yaml = load_yaml_inject_variables(filepath, ToolGlobals.environment_variables()) if isinstance(raw_yaml, list): return self.list_cls.load(raw_yaml) @@ -299,12 +299,12 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability: def get_id(cls, item: Group) -> str: return item.name - def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig) -> Group: + def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> Group: raw = load_yaml_inject_variables(filepath, ToolGlobals.environment_variables()) for capability in raw.get("capabilities", []): for _, values in capability.items(): if len(values.get("scope", {}).get("datasetScope", {}).get("ids", [])) > 0: - if self.load not in ["all_skipped_validation", "all_scoped_skipped_validation"]: + if not dry_run and self.load not in ["all_skipped_validation", "all_scoped_skipped_validation"]: values["scope"]["datasetScope"]["ids"] = [ ToolGlobals.verify_dataset(ext_id) for ext_id in values.get("scope", {}).get("datasetScope", {}).get("ids", []) @@ -313,7 +313,7 @@ def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig) -> Group: values["scope"]["datasetScope"]["ids"] = [-1] if len(values.get("scope", {}).get("extractionPipelineScope", {}).get("ids", [])) > 0: - if self.load not in ["all_skipped_validation", "all_scoped_skipped_validation"]: + if not dry_run and self.load not in ["all_skipped_validation", "all_scoped_skipped_validation"]: values["scope"]["extractionPipelineScope"]["ids"] = [ ToolGlobals.verify_extraction_pipeline(ext_id) for ext_id in values.get("scope", {}).get("extractionPipelineScope", {}).get("ids", []) @@ -546,13 +546,14 @@ def delete(self, ids: Sequence[str]) -> int: self.client.time_series.delete(external_id=ids, ignore_unknown_ids=True) return len(ids) - def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig) -> TimeSeries | TimeSeriesList: + def 
load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> TimeSeries | TimeSeriesList: resources = load_yaml_inject_variables(filepath, {}) if not isinstance(resources, list): resources = [resources] for resource in resources: if resource.get("dataSetExternalId") is not None: - resource["dataSetId"] = ToolGlobals.verify_dataset(resource.pop("dataSetExternalId")) + ds_external_id = resource.pop("dataSetExternalId") + resource["dataSetId"] = ToolGlobals.verify_dataset(ds_external_id) if not dry_run else -1 return TimeSeriesList.load(resources) @@ -579,7 +580,7 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability: def get_id(self, item: Transformation) -> str: return item.external_id - def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig) -> Transformation: + def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> Transformation: raw = load_yaml_inject_variables(filepath, ToolGlobals.environment_variables()) # The `authentication` key is custom for this template: source_oidc_credentials = raw.get("authentication", {}).get("read") or raw.get("authentication") or {} @@ -653,7 +654,7 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability: scope, ) - def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig) -> Path: + def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> Path: return filepath @classmethod @@ -716,10 +717,11 @@ def delete(self, ids: Sequence[str]) -> int: return len(ids) return 0 - def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig) -> ExtractionPipeline: + def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> ExtractionPipeline: resource = load_yaml_inject_variables(filepath, {}) if resource.get("dataSetExternalId") is not None: - resource["dataSetId"] = ToolGlobals.verify_dataset(resource.pop("dataSetExternalId")) + ds_exterla_id = resource.pop("dataSetExternalId") + resource["dataSetId"] = ToolGlobals.verify_dataset(ds_exterla_id) if not dry_run else -1 return ExtractionPipeline.load(resource) def create( @@ -770,7 +772,9 @@ def delete(self, ids: Sequence[str]) -> int: self.client.files.delete(external_id=ids) return len(ids) - def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig) -> FileMetadata | FileMetadataList: + def load_resource( + self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool + ) -> FileMetadata | FileMetadataList: try: files = FileMetadataList( [FileMetadata.load(load_yaml_inject_variables(filepath, ToolGlobals.environment_variables()))] @@ -805,7 +809,7 @@ def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig) -> FileMetad raise FileNotFoundError(f"Could not find file {file.name} referenced in filepath {filepath.name}") if isinstance(file.data_set_id, str): # Replace external_id with internal id - file.data_set_id = ToolGlobals.verify_dataset(file.data_set_id) + file.data_set_id = ToolGlobals.verify_dataset(file.data_set_id) if not dry_run else -1 return files def create( @@ -845,7 +849,7 @@ def drop_load_resources( else: filepaths = [file for file in path.glob("**/*")] - items = [loader.load_resource(f, ToolGlobals) for f in filepaths] + items = [loader.load_resource(f, ToolGlobals, dry_run) for f in filepaths] nr_of_batches = len(items) nr_of_items = sum(len(item) if isinstance(item, Sized) else 1 for item in items) nr_of_deleted = 0 diff --git a/tests/test_cdf_tk/test_load.py b/tests/test_cdf_tk/test_load.py index 
a9d2a2f9f..974e90bb8 100644
--- a/tests/test_cdf_tk/test_load.py
+++ b/tests/test_cdf_tk/test_load.py
@@ -72,7 +72,7 @@ def test_upsert_data_set(cognite_client_approval: CogniteClient):
     cdf_tool.verify_capabilities.return_value = cognite_client_approval
 
     loader = DataSetsLoader.create_loader(cdf_tool)
-    loaded = loader.load_resource(DATA_FOLDER / "data_sets" / "1.my_datasets.yaml", cdf_tool)
+    loaded = loader.load_resource(DATA_FOLDER / "data_sets" / "1.my_datasets.yaml", cdf_tool, dry_run=False)
     assert len(loaded) == 2
 
     first = DataSet.load(loaded[0].dump())

From f78a9a962979cc99cc034b73cacd3a87d5f47800 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?=
Date: Tue, 5 Dec 2023 11:01:10 +0100
Subject: [PATCH 02/90] Added support for nested metadata in datasets

---
 cognite_toolkit/cdf_tk/load.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py
index d5a14c153..47226c2d4 100644
--- a/cognite_toolkit/cdf_tk/load.py
+++ b/cognite_toolkit/cdf_tk/load.py
@@ -15,6 +15,7 @@
 
 import io
 import itertools
+import json
 import re
 from abc import ABC, abstractmethod
 from collections import Counter, defaultdict
@@ -426,6 +427,15 @@ def fixup_resource(local: DataSet, remote: DataSet) -> DataSet:
         local.last_updated_time = remote.last_updated_time
         return local
 
+    def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> DataSetList:
+        resource = load_yaml_inject_variables(filepath, {})
+        data_sets = [resource] if isinstance(resource, dict) else resource
+        for data_set in data_sets:
+            if data_set.get("metadata"):
+                for key, value in data_set["metadata"].items():
+                    data_set["metadata"][key] = json.dumps(value)
+        return DataSetList.load(data_sets)
+
     def create(
         self, items: Sequence[T_Resource], ToolGlobals: CDFToolConfig, drop: bool, filepath: Path
     ) -> T_ResourceList | None:

From 0b2e0e8c51a6ffa6af1ad483e5a25e5ca80a6893 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3%A5l=20R=C3%B8nning?=
Date: Tue, 5 Dec 2023 11:26:29 +0100
Subject: [PATCH 03/90] Update cognite_toolkit/cdf_tk/load.py

Co-authored-by: Anders Albert <60234212+doctrino@users.noreply.github.com>
---
 cognite_toolkit/cdf_tk/load.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py
index 47226c2d4..afe7a845c 100644
--- a/cognite_toolkit/cdf_tk/load.py
+++ b/cognite_toolkit/cdf_tk/load.py
@@ -433,7 +433,7 @@ def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: boo
         for data_set in data_sets:
             if data_set.get("metadata"):
                 for key, value in data_set["metadata"].items():
-                    data_set["metadata"][key] = json.dumps(value)
+                    data_set["metadata"][key] = json.dumps(value) if isinstance(value, dict) else value
         return DataSetList.load(data_sets)

From 697154c4c04a46ff081ea8a7b0e107012081e0dc Mon Sep 17 00:00:00 2001
From: Anders Albert <60234212+doctrino@users.noreply.github.com>
Date: Tue, 5 Dec 2023 17:16:18 +0100
Subject: [PATCH 04/90] Case Check (#193)

* tests: Added failing tests
* tests: failing test 2
* feat: Implemented validate raw
* feat: Support nested
* fix: handle missing expected
* refactor: Handle nested
* refactor; some cleanup
* feat: Implemented validate into main loader loop
* style: Nicer report
* build; changelog"
* refactor: moved validation to build
* refactor; finish move to build cmd of verify camel case
* docs: Added a docstring to the _TypeHints module
* refactor: Moved type hints to its own module
---
 CHANGELOG.cdf-tk.md                           |   3 +
 cognite_toolkit/cdf_tk/_get_type_hints.py     | 133 ++++++++++++++++
 cognite_toolkit/cdf_tk/load.py                |   7 +-
 cognite_toolkit/cdf_tk/templates.py           |  24 +++
 cognite_toolkit/cdf_tk/utils.py               | 146 +++++++++++++++++-
 .../datamodels/snake_cased_view_property.yaml |  12 ++
 .../load_data/timeseries/wrong_case.yaml      |   8 +
 tests/test_cdf_tk/test_utils.py               |  33 +++-
 8 files changed, 362 insertions(+), 4 deletions(-)
 create mode 100644 cognite_toolkit/cdf_tk/_get_type_hints.py
 create mode 100644 tests/test_cdf_tk/load_data/datamodels/snake_cased_view_property.yaml
 create mode 100644 tests/test_cdf_tk/load_data/timeseries/wrong_case.yaml

diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md
index d4955d410..5fb5c4160 100644
--- a/CHANGELOG.cdf-tk.md
+++ b/CHANGELOG.cdf-tk.md
@@ -16,6 +16,9 @@ Changes are grouped as follows:
 - `Security` in case of vulnerabilities.
 
 ## [TBD] - 2023-12-TBD
+### Added
+- Warnings if a configuration file is using `snake_case` when the resource type is expecting `camelCase`.
+
 ### Fixed
 - When running `cdf-tk deploy` with `--dry-run` a `ValueError` was raised if not all datasets were pre-existing.
   This is now fixed by skipping dataset validation when running with `--dry-run`.
diff --git a/cognite_toolkit/cdf_tk/_get_type_hints.py b/cognite_toolkit/cdf_tk/_get_type_hints.py
new file mode 100644
index 000000000..0c661a330
--- /dev/null
+++ b/cognite_toolkit/cdf_tk/_get_type_hints.py
@@ -0,0 +1,133 @@
+from __future__ import annotations
+
+import importlib
+import inspect
+import typing
+from collections import Counter
+from typing import Any, get_type_hints
+
+from cognite.client.data_classes._base import CogniteObject
+
+
+class _TypeHints:
+    """
+    This class is used to get type hints from the init function of a CogniteObject.
+
+    After Python 3.10, type hints are treated as strings, so we need to evaluate them to get the actual type.
+    """
+
+    @classmethod
+    def get_type_hints_by_name(cls, signature, resource_cls: type[CogniteObject]) -> dict[str, Any]:
+        """
+        Get type hints from the init function of a CogniteObject.
+
+        Args:
+            signature: The signature of the init function.
+            resource_cls: The resource class to get type hints from.
+        """
+        try:
+            type_hint_by_name = get_type_hints(resource_cls.__init__, localns=cls._type_checking())
+        except TypeError:
+            # Python 3.10 Type hints cannot be evaluated with get_type_hints,
+            # ref https://stackoverflow.com/questions/66006087/how-to-use-typing-get-type-hints-with-pep585-in-python3-8
+            resource_module_vars = vars(importlib.import_module(resource_cls.__module__))
+            resource_module_vars.update(cls._type_checking())
+            type_hint_by_name = cls._get_type_hints_3_10(resource_module_vars, signature, vars(resource_cls))
+        return type_hint_by_name
+
+    @classmethod
+    def _type_checking(cls) -> dict[str, Any]:
+        """
+        When calling the get_type_hints function, it imports the module of the given function with TYPE_CHECKING set to False.
+
+        This function takes all the special types used in data classes and returns them as a dictionary so it
+        can be used in the local namespaces.
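+
+        The returned dictionary is passed as ``localns`` to ``get_type_hints`` (or merged into the
+        module namespace in the Python 3.10 fallback), so that annotations such as ``NumpyInt64Array``
+        can be evaluated even though the imports that define them are guarded by ``TYPE_CHECKING``.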
+ """ + import numpy as np + import numpy.typing as npt + from cognite.client import CogniteClient + + NumpyDatetime64NSArray = npt.NDArray[np.datetime64] + NumpyInt64Array = npt.NDArray[np.int64] + NumpyFloat64Array = npt.NDArray[np.float64] + NumpyObjArray = npt.NDArray[np.object_] + return { + "CogniteClient": CogniteClient, + "NumpyDatetime64NSArray": NumpyDatetime64NSArray, + "NumpyInt64Array": NumpyInt64Array, + "NumpyFloat64Array": NumpyFloat64Array, + "NumpyObjArray": NumpyObjArray, + } + + @classmethod + def _get_type_hints_3_10( + cls, resource_module_vars: dict[str, Any], signature310: inspect.Signature, local_vars: dict[str, Any] + ) -> dict[str, Any]: + return { + name: cls._create_type_hint_3_10(parameter.annotation, resource_module_vars, local_vars) + for name, parameter in signature310.parameters.items() + if name != "self" + } + + @classmethod + def _create_type_hint_3_10( + cls, annotation: str, resource_module_vars: dict[str, Any], local_vars: dict[str, Any] + ) -> Any: + if annotation.endswith(" | None"): + annotation = annotation[:-7] + try: + return eval(annotation, resource_module_vars, local_vars) + except TypeError: + # Python 3.10 Type Hint + return cls._type_hint_3_10_to_8(annotation, resource_module_vars, local_vars) + + @classmethod + def _type_hint_3_10_to_8( + cls, annotation: str, resource_module_vars: dict[str, Any], local_vars: dict[str, Any] + ) -> Any: + if cls._is_vertical_union(annotation): + alternatives = [ + cls._create_type_hint_3_10(a.strip(), resource_module_vars, local_vars) for a in annotation.split("|") + ] + return typing.Union[tuple(alternatives)] + elif annotation.startswith("dict[") and annotation.endswith("]"): + if Counter(annotation)[","] > 1: + key, rest = annotation[5:-1].split(",", 1) + return dict[key.strip(), cls._create_type_hint_3_10(rest.strip(), resource_module_vars, local_vars)] + key, value = annotation[5:-1].split(",") + return dict[ + cls._create_type_hint_3_10(key.strip(), resource_module_vars, local_vars), + cls._create_type_hint_3_10(value.strip(), resource_module_vars, local_vars), + ] + elif annotation.startswith("Mapping[") and annotation.endswith("]"): + if Counter(annotation)[","] > 1: + key, rest = annotation[8:-1].split(",", 1) + return typing.Mapping[ + key.strip(), cls._create_type_hint_3_10(rest.strip(), resource_module_vars, local_vars) + ] + key, value = annotation[8:-1].split(",") + return typing.Mapping[ + cls._create_type_hint_3_10(key.strip(), resource_module_vars, local_vars), + cls._create_type_hint_3_10(value.strip(), resource_module_vars, local_vars), + ] + elif annotation.startswith("Optional[") and annotation.endswith("]"): + return typing.Optional[cls._create_type_hint_3_10(annotation[9:-1], resource_module_vars, local_vars)] + elif annotation.startswith("list[") and annotation.endswith("]"): + return list[cls._create_type_hint_3_10(annotation[5:-1], resource_module_vars, local_vars)] + elif annotation.startswith("tuple[") and annotation.endswith("]"): + return tuple[cls._create_type_hint_3_10(annotation[6:-1], resource_module_vars, local_vars)] + elif annotation.startswith("typing.Sequence[") and annotation.endswith("]"): + # This is used in the Sequence data class file to avoid name collision + return typing.Sequence[cls._create_type_hint_3_10(annotation[16:-1], resource_module_vars, local_vars)] + raise NotImplementedError(f"Unsupported conversion of type hint {annotation!r}. 
{cls._error_msg}") + + @classmethod + def _is_vertical_union(cls, annotation: str) -> bool: + if "|" not in annotation: + return False + parts = [p.strip() for p in annotation.split("|")] + for part in parts: + counts = Counter(part) + if counts["["] != counts["]"]: + return False + return True diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index afe7a845c..b52e03bc5 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -134,6 +134,7 @@ class Loader(ABC, Generic[T_ID, T_Resource, T_ResourceList]): folder_name: str resource_cls: type[CogniteResource] list_cls: type[CogniteResourceList] + identifier_key: str = "externalId" dependencies: frozenset[Loader] = frozenset() def __init__(self, client: CogniteClient): @@ -248,6 +249,7 @@ class AuthLoader(Loader[int, Group, GroupList]): folder_name = "auth" resource_cls = Group list_cls = GroupList + identifier_key = "name" resource_scopes = frozenset( { capabilities.IDScope, @@ -471,6 +473,7 @@ class RawLoader(Loader[RawTable, RawTable, list[RawTable]]): folder_name = "raw" resource_cls = RawTable list_cls = list[RawTable] + identifier_key = "table_name" data_file_types = frozenset({"csv", "parquet"}) @classmethod @@ -730,8 +733,8 @@ def delete(self, ids: Sequence[str]) -> int: def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> ExtractionPipeline: resource = load_yaml_inject_variables(filepath, {}) if resource.get("dataSetExternalId") is not None: - ds_exterla_id = resource.pop("dataSetExternalId") - resource["dataSetId"] = ToolGlobals.verify_dataset(ds_exterla_id) if not dry_run else -1 + ds_external_id = resource.pop("dataSetExternalId") + resource["dataSetId"] = ToolGlobals.verify_dataset(ds_external_id) if not dry_run else -1 return ExtractionPipeline.load(resource) def create( diff --git a/cognite_toolkit/cdf_tk/templates.py b/cognite_toolkit/cdf_tk/templates.py index c18d916a6..59131c5a7 100644 --- a/cognite_toolkit/cdf_tk/templates.py +++ b/cognite_toolkit/cdf_tk/templates.py @@ -1,5 +1,6 @@ from __future__ import annotations +import itertools import os import re import shutil @@ -9,6 +10,9 @@ import yaml from rich import print +from cognite_toolkit.cdf_tk.load import LOADER_BY_FOLDER_NAME +from cognite_toolkit.cdf_tk.utils import LoadWarning, validate_case_raw + TMPL_DIRS = ["common", "modules", "local_modules", "examples", "experimental"] # Add any other files below that should be included in a build EXCL_FILES = ["README.md"] @@ -377,6 +381,13 @@ def process_config_files( filepath_build=filepath, ): exit(1) + loader = LOADER_BY_FOLDER_NAME.get(filepath.parent.name) + if loader: + load_warnings = validate_case_raw( + parsed, loader.resource_cls, filepath, identifier_key=loader.identifier_key + ) + if load_warnings: + print(f" [bold yellow]WARNING:[/]{generate_warnings_report(load_warnings, indent=1)}") def build_config( @@ -408,3 +419,16 @@ def build_config( shutil.copyfile(Path(source_dir) / "local.yaml", Path(build_dir) / "local.yaml") shutil.copyfile(Path(source_dir) / "packages.yaml", Path(build_dir) / "packages.yaml") shutil.copyfile(Path(source_dir) / "default.packages.yaml", Path(build_dir) / "default.packages.yaml") + + +def generate_warnings_report(load_warnings: list[LoadWarning], indent: int = 0) -> str: + report = [""] + for (file, identifier, id_name), file_warnings in itertools.groupby( + sorted(load_warnings), key=lambda w: (w.filepath, w.id_value, w.id_name) + ): + report.append(f"{' '*indent}In File {str(file)!r}") + 
report.append(f"{' '*indent}In entry {id_name}={identifier!r}") + for warning in file_warnings: + report.append(f"{' '*(indent+1)}{warning!s}") + + return "\n".join(report) diff --git a/cognite_toolkit/cdf_tk/utils.py b/cognite_toolkit/cdf_tk/utils.py index 181689f56..1c415d6c9 100644 --- a/cognite_toolkit/cdf_tk/utils.py +++ b/cognite_toolkit/cdf_tk/utils.py @@ -13,21 +13,31 @@ # limitations under the License. from __future__ import annotations +import abc +import collections +import inspect import json import logging import os +import typing from collections.abc import Sequence +from dataclasses import dataclass +from functools import total_ordering from pathlib import Path -from typing import Any +from typing import Any, get_origin import yaml from cognite.client import ClientConfig, CogniteClient from cognite.client.config import global_config from cognite.client.credentials import OAuthClientCredentials, Token +from cognite.client.data_classes._base import CogniteObject from cognite.client.data_classes.capabilities import Capability from cognite.client.exceptions import CogniteAPIError, CogniteAuthError +from cognite.client.utils._text import to_camel_case, to_snake_case from rich import print +from cognite_toolkit.cdf_tk._get_type_hints import _TypeHints + logger = logging.getLogger(__name__) @@ -345,3 +355,137 @@ def load_yaml_inject_variables(filepath: Path, variables: dict[str, str]) -> dic continue content = content.replace("${%s}" % key, value) return yaml.safe_load(content) + + +@dataclass(frozen=True) +class LoadWarning: + filepath: Path + id_value: str + id_name: str + + +@total_ordering +@dataclass(frozen=True) +class CaseWarning(LoadWarning): + actual: str + expected: str + + def __lt__(self, other: CaseWarning) -> bool: + if not isinstance(other, CaseWarning): + return NotImplemented + return (self.filepath, self.id_value, self.expected, self.actual) < ( + other.filepath, + other.id_value, + other.expected, + other.actual, + ) + + def __eq__(self, other: CaseWarning) -> bool: + if not isinstance(other, CaseWarning): + return NotImplemented + return (self.filepath, self.id_value, self.expected, self.actual) == ( + other.filepath, + other.id_value, + other.expected, + other.actual, + ) + + def __str__(self): + return f"CaseWarning: Got {self.actual!r}. Did you mean {self.expected!r}?" + + +def validate_case_raw( + raw: dict[str, Any] | list[dict[str, Any]], + resource_cls: type[CogniteObject], + filepath: Path, + identifier_key: str = "externalId", +) -> list[CaseWarning]: + """Checks whether camel casing the raw data would match a parameter in the resource class. + + Args: + raw: The raw data to check. + resource_cls: The resource class to check against init method + filepath: The filepath of the raw data. This is used to pass to the warnings for easy + grouping of warnings. + identifier_key: The key to use as identifier. Defaults to "externalId". This is used to pass to the warnings + for easy grouping of warnings. + + Returns: + A list of CaseWarning objects. 
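+
+    Example:
+        A minimal sketch: checking ``{"external_id": "ts1"}`` against ``TimeSeries`` would return a
+        single ``CaseWarning`` suggesting the camelCase key ``externalId``.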
+ + """ + return _validate_case_raw(raw, resource_cls, filepath, identifier_key) + + +def _validate_case_raw( + raw: dict[str, Any] | list[dict[str, Any]], + resource_cls: type[CogniteObject], + filepath: Path, + identifier_key: str = "externalId", + identifier_value: str = "", +) -> list[CaseWarning]: + warnings = [] + if isinstance(raw, list): + for item in raw: + warnings.extend(_validate_case_raw(item, resource_cls, filepath, identifier_key)) + return warnings + elif not isinstance(raw, dict): + return warnings + + signature = inspect.signature(resource_cls.__init__) + + is_base_class = inspect.isclass(resource_cls) and any(base is abc.ABC for base in resource_cls.__bases__) + if is_base_class: + # If it is a base class, it cannot be instantiated, so it can be any of the + # subclasses' parameters. + expected = { + to_camel_case(parameter) + for sub in resource_cls.__subclasses__() + for parameter in inspect.signature(sub.__init__).parameters.keys() + } - {"self"} + else: + expected = set(map(to_camel_case, signature.parameters.keys())) - {"self"} + + actual = set(raw.keys()) + actual_camel_case = set(map(to_camel_case, actual)) + snake_cased = actual - actual_camel_case + + if not identifier_value: + identifier_value = raw.get( + identifier_key, raw.get(to_snake_case(identifier_key), f"No identifier {identifier_key}") + ) + + for key in snake_cased: + if (camel_key := to_camel_case(key)) in expected: + warnings.append(CaseWarning(filepath, identifier_value, identifier_key, str(key), str(camel_key))) + + try: + type_hints_by_name = _TypeHints.get_type_hints_by_name(signature, resource_cls) + except Exception: + # If we cannot get type hints, we cannot check if the type is correct. + return warnings + + for key, value in raw.items(): + if not isinstance(value, dict): + continue + if (parameter := signature.parameters.get(to_snake_case(key))) and ( + type_hint := type_hints_by_name.get(parameter.name) + ): + if issubclass(type_hint, CogniteObject): + warnings.extend(_validate_case_raw(value, type_hint, filepath, identifier_key, identifier_value)) + continue + + container_type = get_origin(type_hint) + if container_type not in [dict, dict, collections.abc.MutableMapping, collections.abc.Mapping]: + continue + args = typing.get_args(type_hint) + if not args: + continue + container_key, container_value = args + if inspect.isclass(container_value) and issubclass(container_value, CogniteObject): + for sub_key, sub_value in value.items(): + warnings.extend( + _validate_case_raw(sub_value, container_value, filepath, identifier_key, identifier_value) + ) + + return warnings diff --git a/tests/test_cdf_tk/load_data/datamodels/snake_cased_view_property.yaml b/tests/test_cdf_tk/load_data/datamodels/snake_cased_view_property.yaml new file mode 100644 index 000000000..803d8f7ab --- /dev/null +++ b/tests/test_cdf_tk/load_data/datamodels/snake_cased_view_property.yaml @@ -0,0 +1,12 @@ +externalId: WorkItem +name: WorkItem +space: 'aSpace' +version: '1' +properties: + criticality: + container: + externalId: WorkItem + space: 'aSpace' + type: container + container_property_identifier: criticality + name: criticality diff --git a/tests/test_cdf_tk/load_data/timeseries/wrong_case.yaml b/tests/test_cdf_tk/load_data/timeseries/wrong_case.yaml new file mode 100644 index 000000000..2470df705 --- /dev/null +++ b/tests/test_cdf_tk/load_data/timeseries/wrong_case.yaml @@ -0,0 +1,8 @@ +- externalId: 'wrong_case' + name: 'Wrong cased data set' + is_string: true + metadata: + time_zone: 'Europe/Oslo' + unit: 'm' + 
is_step: false + description: This file has snake case for isStep and isString diff --git a/tests/test_cdf_tk/test_utils.py b/tests/test_cdf_tk/test_utils.py index d52eec357..30ff85299 100644 --- a/tests/test_cdf_tk/test_utils.py +++ b/tests/test_cdf_tk/test_utils.py @@ -1,19 +1,26 @@ +from pathlib import Path from unittest.mock import Mock, patch import pytest import yaml from cognite.client._api.iam import TokenAPI, TokenInspection +from cognite.client.data_classes import TimeSeries from cognite.client.data_classes.capabilities import ( DataSetsAcl, ProjectCapability, ProjectCapabilityList, ProjectsScope, ) +from cognite.client.data_classes.data_modeling import ViewApply from cognite.client.data_classes.iam import ProjectSpec from cognite.client.exceptions import CogniteAuthError from cognite.client.testing import CogniteClientMock -from cognite_toolkit.cdf_tk.utils import CDFToolConfig, load_yaml_inject_variables +from cognite_toolkit.cdf_tk.utils import CaseWarning, CDFToolConfig, load_yaml_inject_variables, validate_case_raw + +THIS_FOLDER = Path(__file__).resolve().parent + +DATA_FOLDER = THIS_FOLDER / "load_data" def mocked_init(self, client_name: str): @@ -70,3 +77,27 @@ def test_load_yaml_inject_variables(tmp_path) -> None: loaded = load_yaml_inject_variables(my_file, {"TEST": "my_injected_value"}) assert loaded["test"] == "my_injected_value" + + +def test_validate_raw() -> None: + raw_file = DATA_FOLDER / "timeseries" / "wrong_case.yaml" + + warnings = validate_case_raw(yaml.safe_load(raw_file.read_text()), TimeSeries, raw_file) + + assert len(warnings) == 2 + assert sorted(warnings) == sorted( + [ + CaseWarning(raw_file, "wrong_case", "externalId", "is_string", "isString"), + CaseWarning(raw_file, "wrong_case", "externalId", "is_step", "isStep"), + ] + ) + + +def test_validate_raw_nested() -> None: + raw_file = DATA_FOLDER / "datamodels" / "snake_cased_view_property.yaml" + warnings = validate_case_raw(yaml.safe_load(raw_file.read_text()), ViewApply, raw_file) + + assert len(warnings) == 1 + assert warnings == [ + CaseWarning(raw_file, "WorkItem", "externalId", "container_property_identifier", "containerPropertyIdentifier") + ] From e910dc4ea0624f65e9244553e53a598dcce6004d Mon Sep 17 00:00:00 2001 From: Jan Inge Bergseth <31886431+BergsethCognite@users.noreply.github.com> Date: Wed, 6 Dec 2023 07:45:23 +0100 Subject: [PATCH 05/90] update config --- .../auth/asset.extractor.groups.yaml | 22 +++++++++++ .../auth/asset.processing.groups.yaml | 37 +++++++++++++++++++ .../auth/asset.read.groups.yaml | 16 ++++++++ .../data_sets/dataset.yaml | 7 ++++ .../default.config.yaml | 33 +++++++++++++++++ .../source_asset_valhall_workmate.yaml | 10 +++++ .../source_asset_valhall_workmate_config.json | 35 ++++++++++++++++++ .../tr_asset_oid_workmate_asset_hierarchy.sql | 23 ++++++++++++ ...tr_asset_oid_workmate_asset_hierarchy.yaml | 23 ++++++++++++ 9 files changed, 206 insertions(+) create mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml create mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml create mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.read.groups.yaml create mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml create mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/default.config.yaml create mode 100644 
cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml
 create mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate_config.json
 create mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.sql
 create mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml

diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml
new file mode 100644
index 000000000..55b4510df
--- /dev/null
+++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml
@@ -0,0 +1,22 @@
+# This role is used for the extractor writing data to CDF RAW specified with the DB scope
+name: 'gp_asset_{{location_name}}_extractor'
+sourceId: '{{asset_location_extractor_group_source_id}}'
+metadata:
+  origin: 'cdf-project-templates'
+  module_version: '{{module_version}}'
+capabilities:
+  - rawAcl:
+      actions:
+        - READ
+        - WRITE
+      scope:
+        tableScope: {
+          dbsToTables: {'asset-{{location_name}}-{{source_name}}':{} }
+        }
+  - extractionConfigsAcl:
+      actions:
+        - READ
+      scope:
+        extractionPipelineScope: {
+          ids: ['ep_src_asset_{{location_name}}_{{source_name}}']
+        }
diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml
new file mode 100644
index 000000000..9dacb4f9e
--- /dev/null
+++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml
@@ -0,0 +1,37 @@
+# This role is used for the processing of asset data, creating an asset hierarchy
+name: 'gp_asset_{{location_name}}_processing'
+sourceId: '{{asset_location_processing_group_source_id}}'
+metadata:
+  origin: 'cdf-project-templates'
+  module_version: '{{module_version}}'
+capabilities:
+  - rawAcl:
+      actions:
+        - READ
+        - WRITE
+      scope:
+        tableScope: {
+          dbsToTables: {'asset-{{location_name}}-{{source_name}}':{} }
+        }
+  - transformationsAcl:
+      actions:
+        - READ
+        - WRITE
+      scope:
+        all: {}
+  - sessionsAcl:
+      actions:
+        - LIST
+        - CREATE
+        - DELETE
+      scope:
+        all: {}
+  - assetsAcl:
+      actions:
+        - READ
+        - WRITE
+      scope:
+        datasetScope: {
+          ids: ['ds_asset_{{location_name}}']
+        }
+        
\ No newline at end of file
diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.read.groups.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.read.groups.yaml
new file mode 100644
index 000000000..b618cb69c
--- /dev/null
+++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.read.groups.yaml
@@ -0,0 +1,16 @@
+# This role is identical to the normal users, but is used to group users with additional InField feature capabilities for checklist admins.
+# Typical Team captain / Supervisor users
+name: 'gp_asset_{{location_name}}_read'
+sourceId: '{{asset_location_read_group_source_id}}'
+metadata:
+  origin: 'cdf-project-templates'
+  module_version: '{{module_version}}'
+capabilities:
+  - assetsAcl:
+      actions:
+        - READ
+      scope:
+        datasetScope: {
+          ids: ['ds_asset_{{location_name}}']
+        }
+        
\ No newline at end of file
diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml
new file mode 100644
index 000000000..4dfe4fb50
--- /dev/null
+++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml
@@ -0,0 +1,7 @@
+externalId: 'ds_asset_{{location_name}}'
+name: 'asset:{{location_name}}'
+description: 'Asset data for {{location_name}}'
+metadata:
+  consoleSource: '{"names": ["{{source_name}}"]}'
+  rawTables: '[{"databaseName": "asset_{{location_name}}_{{source_name}}", "tableName": "assets"}]'
+  transformations: '[{"externalId": "tr_asset_{{location_name}}_{{source_name}}_asset_hierarchy", "type":"jetfire"}]'
\ No newline at end of file
diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/default.config.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/default.config.yaml
new file mode 100644
index 000000000..73198c351
--- /dev/null
+++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/default.config.yaml
@@ -0,0 +1,33 @@
+# This is the configuration file used for the example data from The Open Industrial Data (oid)
+#
+# The data originates from a single compressor on Aker BP’s Valhall oil platform
+# in the North Sea. Aker BP selected the first stage compressor on the Valhall
+# because it is a subsystem with clearly defined boundaries, rich in time series and maintenance data.
+---
+# specify the site/asset location where data comes from, ex 'valhall_oid' or if they are generic for all assets use 'all'
+location_name: oid
+module_version: '1'
+
+# specify the name of the source making it possible to identify where the data originates from, ex: 'workmate', 'sap', 'oracle',..
+source_name: workmate + +asset_dataset: ds_asset_oid +asset_raw_input_db: asset-oid-workmate +asset_raw_input_table: assets + +# source ID from Azure AD for the corresponding groups, ex 'c74797ce-9191-4a4a-9186-8fe21c54c3de' +asset_location_extractor_group_source_id: c74797ce-c49b-4ada-9186-8fe21c54c3de +asset_location_processing_group_source_id: c74797ce-c49b-4ada-9186-8fe21c54c3de +asset_location_read_group_source_id: c74797ce-c49b-4ada-9186-8fe21c54c3de + + +# Transformation credentials +clientId: ${IDP_CLIENT_ID} +clientSecret: ${IDP_CLIENT_SECRET} +tokenUri: ${IDP_TOKEN_URL} +# Optional: If idP requires providing the scopes +cdfProjectName: ${CDF_PROJECT} +scopes: ${IDP_SCOPES} +# Optional: If idP requires providing the audience +audience: ${IDP_AUDIENCE} + diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml new file mode 100644 index 000000000..f8363a1dc --- /dev/null +++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml @@ -0,0 +1,10 @@ +--- +externalId: 'ep_src_asset_{{location_name}}_{{source_name}}' +name: 'src:asset:{{location_name}}:{{source_name}}' +dataSetExternalId: 'ds_asset_{{location_name}}' +description: 'Asset source extraction pipeline with configuration for DB extractor reading data from {{location_name}}:{{source_name}}' +rawTables: + - dbName: 'asset-{{location_name}}-{{source_name}}' + tableName: 'assets' +source: '{{source_name}}' +documentation: "The DB Extractor is a general database extractor that connects to a database, executes one or several queries and sends the result to CDF RAW.\n\nThe extractor connects to a database over ODBC, which means that you need an ODBC driver for your database. If you are running the Docker version of the extractor, ODBC drivers for MySQL, MS SQL, PostgreSql and Oracle DB are preinstalled in the image. See the example config for details on connection strings for these. If you are running the Windows exe version of the extractor, you must provide an ODBC driver yourself. 
These are typically provided by the database vendor.\n\nFurther documentation is available [here](./docs/documentation.md)\n\nFor information on development, consider the following guides:\n\n * [Development guide](guides/development.md)\n * [Release guide](guides/release.md)"
\ No newline at end of file
diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate_config.json b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate_config.json
new file mode 100644
index 000000000..fecae60a6
--- /dev/null
+++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate_config.json
@@ -0,0 +1,35 @@
+logger:
+    console:
+        level: INFO
+    file:
+        level: INFO
+        path: "file.log"
+# List of databases
+databases:
+    - type: odbc
+      name: postgres
+      connection-string: "DSN={MyPostgresDsn}"
+# List of queries
+queries:
+    - name: test-postgres
+      database: postgres
+      query: >
+        SELECT
+
+        *
+        FROM
+
+        mytable
+        WHERE
+
+        {incremental_field} >= '{start_at}'
+        ORDER BY
+
+        {incremental_field} ASC
+      incremental-field: "id"
+      initial-start: 0
+      destination:
+        type: raw
+        database: "db-extractor"
+        table: "postgres"
+      primary-key: "{id}"
diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.sql b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.sql
new file mode 100644
index 000000000..be59d2abf
--- /dev/null
+++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.sql
@@ -0,0 +1,23 @@
+--
+-- Create Asset Hierarchy using Transformation
+--
+-- Input data from RAW DB table (using example data)
+--
+-- Root node has parentExternalId = ''
+-- Transformation is connected to asset data set
+-- All metadata except selected fields are added to metadata
+--
+SELECT
+  sourceDb || ':' || tag as externalId,
+  if(parentTag is null,
+     '',
+     sourceDb || ':' ||parentTag) as parentExternalId,
+  tag as name,
+  sourceDb as source,
+  description,
+  dataset_id('{{asset_dataset}}') as dataSetId,
+  to_metadata_except(
+    array("sourceDb", "parentTag", "description"), *)
+  as metadata
+FROM
+  `{{asset_raw_input_db}}`.`{{asset_raw_input_table}}`
diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml
new file mode 100644
index 000000000..69e65c1ac
--- /dev/null
+++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml
@@ -0,0 +1,23 @@
+externalId: 'tr_asset_{{location_name}}_{{source_name}}_asset_hierarchy'
+name: 'asset:{{location_name}}:{{source_name}}:asset_hierarchy'
+destination:
+  type: "asset_hierarchy"
+ignoreNullFields: true
+shared: true
+conflictMode: upsert
+# Specify credentials separately like this:
+# You can also use different credentials for the running transformations than the ones you use to deploy
+authentication:
+  clientId: {{clientId}}
+  clientSecret: {{clientSecret}}
+  tokenUri: {{tokenUri}}
+  # Optional: If idP requires providing the scopes
+  cdfProjectName: {{cdfProjectName}}
+  scopes:
+    - {{scopes}}
+  # Optional: If idP requires providing the audience
+  audience: {{audience}}
+schedule:
+  # every hour
+  
interval: '0 * * * *' + isPaused: true \ No newline at end of file From abd6ca1f0b4acd8356e2cc2e8dfe9acf8d778eea Mon Sep 17 00:00:00 2001 From: Anders Albert <60234212+doctrino@users.noreply.github.com> Date: Wed, 6 Dec 2023 10:21:51 +0100 Subject: [PATCH 06/90] Refactor; Data Model Loaders (#187) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * refactor: Rename load_file to load_resource * refactor: introduce dependencies class variable and reorder to match it * refactor: use topological sort * refactor: reversed ruff * tests: update test data * refactor: clean up clean cmd * fix: accidental change of cmd line * refactor: introduce data modeling loaders * fix: minor introduced bug * refactor: Added missing spaces in cdf_asset_source_model and example_pump_data_model * tests: Regen test data * build: update changelog * refactor; Moved out fixtures for reuse * tests: Added testing for clean cmd * feat: Node and Edge loaders * tests: Regen test data * refactor; added drop-data as option * refactor: cleanup * refactor: remove graphql loader * refactor: support for multiple required capabilities * refactor: introduce space validation * build: changelog" * fix: skip dataset validation if --dry-run is set * build: changelog * tests: updated test * fix: load with ids' * refactor: Review feedback * refactor; reverted delete_instances and added ToolGlobals to state of loadersæ * tests: updated test data * build; extended checklist for PR * build; update changelog * refactor: fix issues with clean * fix; display of panel * refactor: introduce display name to distinguish between nodes and edges * refactor: Exit early if no items * refactor: upgrade case check to support data models * refactor: Review feedback * refactor: Set transformation dataset from transformation config * build: changelog * refactor: removed option from deploy * tests: updated test to no drop-data * ci: update ci to not drop data * refactor: review feedback --- .github/pull_request_template.md | 1 + .github/workflows/demo.yml | 10 +- CHANGELOG.cdf-tk.md | 9 +- CHANGELOG.templates.md | 6 +- cognite_toolkit/cdf.py | 120 +-- cognite_toolkit/cdf_tk/delete.py | 20 +- cognite_toolkit/cdf_tk/load.py | 887 ++++++++---------- cognite_toolkit/cdf_tk/templates.py | 32 +- cognite_toolkit/cdf_tk/utils.py | 31 +- .../data_models/2.ModelSpace.space.yaml | 3 + .../data_models/1.InstanceSpace.space.yaml | 3 - .../data_models/1.spaces.space.yaml | 6 + .../infield_apm_app_config.node.yaml | 38 +- .../cdf_infield_location/default.config.yaml | 1 + tests/conftest.py | 37 +- tests/test_approval_modules.py | 1 - .../cdf_apm_base.yaml | 16 - .../cdf_apm_simple_data_model.yaml | 12 - .../cdf_asset_source_model.yaml | 8 +- .../cdf_infield_common.yaml | 3 - .../cdf_infield_location.yaml | 3 - .../example_pump_data_model.yaml | 8 +- .../cdf_asset_source_model.yaml | 1 + .../cdf_infield_location.yaml | 4 + .../example_pump_data_model.yaml | 1 + .../datamodel_graphql/datamodel.graphql | 4 - tests/test_cdf_tk/test_load.py | 27 +- 27 files changed, 600 insertions(+), 692 deletions(-) create mode 100644 cognite_toolkit/experimental/cdf_asset_source_model/data_models/2.ModelSpace.space.yaml delete mode 100644 cognite_toolkit/experimental/example_pump_data_model/data_models/1.InstanceSpace.space.yaml create mode 100644 cognite_toolkit/experimental/example_pump_data_model/data_models/1.spaces.space.yaml delete mode 100644 tests/test_cdf_tk/load_data/datamodel_graphql/datamodel.graphql diff --git 
a/.github/pull_request_template.md b/.github/pull_request_template.md
index 226dd5eb8..5a591b761 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -3,6 +3,7 @@ Please describe the change you have made.
 
 ## Checklist:
 - [ ] Tests added/updated.
+- [ ] Run Demo Job Locally.
 - [ ] Documentation updated.
 - [ ] Changelogs updated in [CHANGELOG.cdf-tk.md](https://github.com/cognitedata/cdf-project-templates/blob/main/CHANGELOG.cdf-tk.md).
 - [ ] Template changelogs updated in [CHANGELOG.templates.md](https://github.com/cognitedata/cdf-project-templates/blob/main/CHANGELOG.templates.md).
diff --git a/.github/workflows/demo.yml b/.github/workflows/demo.yml
index d2d9aef40..27f448e48 100644
--- a/.github/workflows/demo.yml
+++ b/.github/workflows/demo.yml
@@ -38,9 +38,9 @@ jobs:
       - name: "Build the templates"
         run: cdf-tk build --build-dir=./build --env=demo ./demo_project
       # be careful, this works as promised
-      #- name: "Delete existing resources including data"
-      #  run: |
-      #    cdf-tk clean --env=demo ./build
+#      - name: "Delete existing resources including data"
+#        run: |
+#          cdf-tk clean --env=demo ./build --include data_models
       - name: "Verify and create access rights"
         run: cdf-tk auth verify
       - name: "Allow some time for data modeling to finish syncing of deletions"
@@ -48,7 +48,7 @@
           sleep 30
       - name: "Deploy the templates"
         run: |
-          cdf-tk deploy --drop --drop-data --env=demo ./build
+          cdf-tk deploy --drop --env=demo ./build
       - name: "Run transformations and other post-processing"
         run: |
-          poetry run python ./demo/postproc.py
\ No newline at end of file
+          poetry run python ./demo/postproc.py
diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md
index 5fb5c4160..71515b4fd 100644
--- a/CHANGELOG.cdf-tk.md
+++ b/CHANGELOG.cdf-tk.md
@@ -18,6 +18,13 @@ Changes are grouped as follows:
 ## [TBD] - 2023-12-TBD
 ### Added
 - Warnings if a configuration file is using `snake_case` when the resource type is expecting `camelCase`.
+- Added support for validation of `space` for data models.
+### Removed
+- In the `deploy` command, the `drop_data` option has been removed. To drop data, use the `clean` command instead.
+### Changed
+- Require all spaces to be explicitly defined as separate .space.yaml files.
+- The `data_set_id` for `Transformations` must now be set explicitly in the yaml config file for the `Transformation`
+  under the `data_set_id` key. Note that you also need to explicitly define the `data_set` in its own yaml config file.
 
 ### Fixed
 - When running `cdf-tk deploy` with `--dry-run` a `ValueError` was raised if not all datasets were pre-existing.
@@ -30,7 +37,7 @@
 - Refactored load functionality. Loading raw tables and files now requires a `yaml` file with metadata.
 - Fix container comparison to detect identical containers when loading data models (without --drop flag).
 - Clean up error on resource does not exist when deleting (on `deploy --drop` or using clean command).
- 
+
 ### Added
 - Support for loading `data_sets`.
diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md
index 4fd403bfa..dbcc931a6 100644
--- a/CHANGELOG.templates.md
+++ b/CHANGELOG.templates.md
@@ -13,6 +13,10 @@ Changes are grouped as follows:
 - `Removed` for now removed features.
 - `Fixed` for any bug fixes.
 - `Security` in case of vulnerabilities.
+-
+## TBD - 2023-12-TBD
+### Added
+- Explicitly define model `space` in `experimental/cdf_asset_source_model/` and `experimental/example_pump_model/`.
## [0.2.0] - 2023-12-01 @@ -34,7 +38,7 @@ Changes are grouped as follows: - Add space yaml files for existing data models when explicit space definition was introduced. - Fix use of integer value in version for data models. - Fix wrong reference to `apm_simple` in `examples/cdf_apm_simple_data_model` and `modules/cdf_infield_location`. -- Examplify use of a single config yaml file for multiple file resources in `examples/cdf_oid_example_data/files/files.yaml`. +- Exemplify use of a single config yaml file for multiple file resources in `examples/cdf_oid_example_data/files/files.yaml`. ## [0.1.1] - 2023-11-23 diff --git a/cognite_toolkit/cdf.py b/cognite_toolkit/cdf.py index e0ff301bc..e99f9ca12 100755 --- a/cognite_toolkit/cdf.py +++ b/cognite_toolkit/cdf.py @@ -1,12 +1,10 @@ #!/usr/bin/env python -import itertools import shutil import tempfile import urllib import zipfile from collections.abc import Sequence from dataclasses import dataclass -from enum import Enum from graphlib import TopologicalSorter from importlib import resources from pathlib import Path @@ -25,8 +23,6 @@ LOADER_BY_FOLDER_NAME, AuthLoader, drop_load_resources, - load_datamodel, - load_nodes, ) from cognite_toolkit.cdf_tk.templates import build_config, read_environ_config from cognite_toolkit.cdf_tk.utils import CDFToolConfig @@ -38,15 +34,7 @@ app.add_typer(auth_app, name="auth") -# There enums should be removed when the load_datamodel function is refactored to use the LoaderCls. -class CDFDataTypes(str, Enum): - data_models = "data_models" - instances = "instances" - - -_AVAILABLE_DATA_TYPES: tuple[str] = tuple( - itertools.chain((type_.value for type_ in CDFDataTypes), LOADER_BY_FOLDER_NAME.keys()) -) +_AVAILABLE_DATA_TYPES: tuple[str] = tuple(LOADER_BY_FOLDER_NAME) # Common parameters handled in common callback @@ -215,14 +203,6 @@ def deploy( help="Whether to drop existing configurations, drop per resource if present", ), ] = False, - drop_data: Annotated[ - Optional[bool], - typer.Option( - "--drop-data", - "-D", - help="Whether to drop existing data, drop data if present (WARNING!! 
includes data from pipelines)", - ), - ] = False, dry_run: Annotated[ Optional[bool], typer.Option( @@ -253,7 +233,7 @@ def deploy( # Set environment variables from local.yaml read_environ_config(root_dir=build_dir, build_env=build_env, set_env_only=True) - typer.echo(Panel(f"[bold]Deploying config files from {build_dir} to environment {build_env}...[/]")) + print(Panel(f"[bold]Deploying config files from {build_dir} to environment {build_env}...[/]")) build_path = Path(build_dir) if not build_path.is_dir(): typer.echo( @@ -268,8 +248,9 @@ def deploy( # once with all_scoped_skipped_validation and once with resource_scoped_only selected_loaders = { LoaderCls: LoaderCls.dependencies - for folder_name, LoaderCls in LOADER_BY_FOLDER_NAME.items() + for folder_name, loader_classes in LOADER_BY_FOLDER_NAME.items() if folder_name in include and folder_name != "auth" and (build_path / folder_name).is_dir() + for LoaderCls in loader_classes } arguments = dict( @@ -277,6 +258,7 @@ def deploy( drop=drop, load=True, dry_run=dry_run, + drop_data=False, verbose=ctx.obj.verbose, ) @@ -291,38 +273,15 @@ def deploy( if ToolGlobals.failed: print("[bold red]ERROR: [/] Failure to deploy auth as expected.") exit(1) - if CDFDataTypes.data_models.value in include and (models_dir := Path(f"{build_dir}/data_models")).is_dir(): - load_datamodel( - ToolGlobals, - drop=drop, - drop_data=drop_data, - directory=models_dir, - delete_containers=drop_data, # Also delete properties that have been ingested (leaving empty instances) - delete_spaces=drop_data, # Also delete spaces if there are no empty instances (needs to be deleted separately) - dry_run=dry_run, - ) - if ToolGlobals.failed: - print("[bold red]ERROR: [/] Failure to load data models as expected.") - exit(1) - if CDFDataTypes.instances.value in include and (models_dir := Path(f"{build_dir}/data_models")).is_dir(): - load_nodes( - ToolGlobals, - directory=models_dir, - dry_run=dry_run, + for LoaderCls in TopologicalSorter(selected_loaders).static_order(): + drop_load_resources( + LoaderCls.create_loader(ToolGlobals), + build_path / LoaderCls.folder_name, + **arguments, ) if ToolGlobals.failed: - print("[bold red]ERROR: [/] Failure to load instances as expected.") + print(f"[bold red]ERROR: [/] Failure to load {LoaderCls.folder_name} as expected.") exit(1) - for LoaderCls in TopologicalSorter(selected_loaders).static_order(): - if LoaderCls.folder_name in include and (build_path / LoaderCls.folder_name).is_dir(): - drop_load_resources( - LoaderCls.create_loader(ToolGlobals), - build_path / LoaderCls.folder_name, - **arguments, - ) - if ToolGlobals.failed: - print(f"[bold red]ERROR: [/] Failure to load {LoaderCls.folder_name} as expected.") - exit(1) if "auth" in include and (directory := (Path(build_dir) / "auth")).is_dir(): # Last, we need to get all the scoped access, as the resources should now have been created. @@ -394,7 +353,7 @@ def clean( # Set environment variables from local.yaml read_environ_config(root_dir=build_dir, build_env=build_env, set_env_only=True) - print(Panel(f"[bold]Cleaning environment {build_env} based on config files from {build_dir}...[/]")) + Panel(f"[bold]Cleaning environment {build_env} based on config files from {build_dir}...[/]") build_path = Path(build_dir) if not build_path.is_dir(): typer.echo( @@ -408,55 +367,30 @@ def clean( # The 'auth' loader is excluded, as it is run at the end. 
selected_loaders = { LoaderCls: LoaderCls.dependencies - for folder_name, LoaderCls in LOADER_BY_FOLDER_NAME.items() + for folder_name, loader_classes in LOADER_BY_FOLDER_NAME.items() if folder_name in include and folder_name != "auth" and (build_path / folder_name).is_dir() + for LoaderCls in loader_classes } print(ToolGlobals.as_string()) - if CDFDataTypes.data_models in include and (models_dir := Path(f"{build_dir}/data_models")).is_dir(): - # We use the load_datamodel with only_drop=True to ensure that we get a clean - # deletion of the data model entities and instances. - load_datamodel( - ToolGlobals, - drop=True, - drop_data=True, - only_drop=True, - directory=models_dir, - delete_removed=True, - delete_spaces=True, - delete_containers=True, - dry_run=dry_run, - ) - elif CDFDataTypes.instances in include and (models_dir := Path(f"{build_dir}/data_models")).is_dir(): - load_datamodel( - ToolGlobals, - drop=False, - drop_data=True, - only_drop=True, - directory=models_dir, - delete_removed=False, - delete_spaces=False, - delete_containers=False, - dry_run=dry_run, - ) if ToolGlobals.failed: print("[bold red]ERROR: [/] Failure to delete data models as expected.") exit(1) for LoaderCls in reversed(list(TopologicalSorter(selected_loaders).static_order())): - if LoaderCls.folder_name in include and (Path(build_dir) / LoaderCls.folder_name).is_dir(): - drop_load_resources( - LoaderCls.create_loader(ToolGlobals), - build_path / LoaderCls.folder_name, - ToolGlobals, - drop=True, - load=False, - dry_run=dry_run, - verbose=ctx.obj.verbose, - ) - if ToolGlobals.failed: - print(f"[bold red]ERROR: [/] Failure to clean {LoaderCls.folder_name} as expected.") - exit(1) + drop_load_resources( + LoaderCls.create_loader(ToolGlobals), + build_path / LoaderCls.folder_name, + ToolGlobals, + drop=True, + load=False, + drop_data=True, + dry_run=dry_run, + verbose=ctx.obj.verbose, + ) + if ToolGlobals.failed: + print(f"[bold red]ERROR: [/] Failure to clean {LoaderCls.folder_name} as expected.") + exit(1) if "auth" in include and (directory := (Path(build_dir) / "auth")).is_dir(): drop_load_resources( AuthLoader.create_loader(ToolGlobals, target_scopes="all_scoped_skipped_validation"), diff --git a/cognite_toolkit/cdf_tk/delete.py b/cognite_toolkit/cdf_tk/delete.py index a41b5212d..b25b39ed4 100644 --- a/cognite_toolkit/cdf_tk/delete.py +++ b/cognite_toolkit/cdf_tk/delete.py @@ -32,18 +32,17 @@ def delete_instances( dry_run=False, delete_edges=True, delete_nodes=True, -) -> None: +) -> bool: """Delete instances in a space from CDF based on the space name Args: - space_name (str): The name of the space to delete instances from - dry_run (bool): Do not delete anything, just print what would have been deleted - delete_edges (bool): Delete all edges in the space - delete_nodes (bool): Delete all nodes in the space + space_name (str): The name of the space to delete instances from + dry_run (bool): Do not delete anything, just print what would have been deleted + delete_edges (bool): Delete all edges in the space + delete_nodes (bool): Delete all nodes in the space """ if space_name is None or len(space_name) == 0: - return - # TODO: Here we should really check on whether we have the Acl on the space, not yet implemented + return True client: CogniteClient = ToolGlobals.verify_client( capabilities={ "dataModelInstancesAcl": ["READ", "WRITE"], @@ -74,7 +73,7 @@ def delete_instances( except Exception as e: print(f"[bold red]ERROR: [/] Failed to delete edges in {space_name}.\n{e}") ToolGlobals.failed = True - return 
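+            # report the failure to the caller as well as via ToolGlobals.failed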
+ return False print(f" Found {edge_count} edges and deleted {edge_delete} edges from space {space_name}.") if delete_nodes: print(" Deleting nodes...") @@ -96,8 +95,9 @@ def delete_instances( except Exception as e: print(f"[bold red]ERROR: [/] Failed to delete nodes in {space_name}.\n{e}") ToolGlobals.failed = True - return + return False print(f" Found {node_count} nodes and deleted {node_delete} nodes from {space_name}.") + return True def delete_containers(ToolGlobals: CDFToolConfig, dry_run=False, containers: ContainerList = None) -> None: @@ -235,7 +235,7 @@ def delete_datamodel_all( print(f" Deleting {len(containers.as_ids())} containers in the space {space_name}") if delete_nodes or delete_edges: delete_instances( - ToolGlobals, + ToolGlobals.client, space_name=space_name, dry_run=dry_run, delete_edges=delete_edges, diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index b52e03bc5..0ff88840a 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -18,7 +18,7 @@ import json import re from abc import ABC, abstractmethod -from collections import Counter, defaultdict +from collections import Counter from collections.abc import Sequence, Sized from contextlib import suppress from dataclasses import dataclass @@ -48,6 +48,8 @@ ) from cognite.client.data_classes.capabilities import ( Capability, + DataModelInstancesAcl, + DataModelsAcl, DataSetsAcl, ExtractionPipelinesAcl, FilesAcl, @@ -58,18 +60,23 @@ ) from cognite.client.data_classes.data_modeling import ( ContainerApply, - ContainerProperty, + ContainerApplyList, DataModelApply, + DataModelApplyList, + EdgeApply, + EdgeApplyList, NodeApply, NodeApplyList, - NodeOrEdgeData, SpaceApply, + SpaceApplyList, ViewApply, - ViewId, + ViewApplyList, ) +from cognite.client.data_classes.data_modeling.ids import ContainerId, DataModelId, EdgeId, NodeId, ViewId from cognite.client.data_classes.iam import Group, GroupList from cognite.client.exceptions import CogniteAPIError, CogniteDuplicatedError, CogniteNotFoundError from rich import print +from typing_extensions import Self from .delete import delete_instances from .utils import CDFToolConfig, load_yaml_inject_variables @@ -91,6 +98,83 @@ def dump(self, camel_case: bool = True) -> dict[str, Any]: } +@dataclass +class LoadableNodes(CogniteObject): + """ + This is a helper class for nodes that contains arguments that are required for writing the + nodes to CDF. + """ + + auto_create_direct_relations: bool + skip_on_version_conflict: bool + replace: bool + nodes: NodeApplyList + + def __len__(self): + return len(self.nodes) + + def __iter__(self): + return iter(self.nodes) + + @classmethod + def _load(cls, resource: dict[str, Any], cognite_client: CogniteClient | None = None) -> Self: + return cls( + auto_create_direct_relations=resource["autoCreateDirectRelations"], + skip_on_version_conflict=resource["skipOnVersionConflict"], + replace=resource["replace"], + nodes=NodeApplyList.load(resource["nodes"]), + ) + + def dump(self, camel_case: bool = True) -> dict[str, Any]: + return { + "autoCreateDirectRelations" + if camel_case + else "auto_create_direct_relations": self.auto_create_direct_relations, + "skipOnVersionConflict" if camel_case else "skip_on_version_conflict": self.skip_on_version_conflict, + "replace": self.replace, + "nodes": self.nodes.dump(camel_case), + } + + +@dataclass +class LoadableEdges(CogniteObject): + """ + This is a helper class for edges that contains arguments that are required for writing the + edges to CDF. 
+ """ + + auto_create_start_nodes: bool + auto_create_end_nodes: bool + skip_on_version_conflict: bool + replace: bool + edges: EdgeApplyList + + def __len__(self): + return len(self.edges) + + def __iter__(self): + return iter(self.edges) + + @classmethod + def _load(cls, resource: dict[str, Any], cognite_client: CogniteClient | None = None) -> Self: + return cls( + auto_create_start_nodes=resource["autoCreateStartNodes"], + auto_create_end_nodes=resource["autoCreateEndNodes"], + skip_on_version_conflict=resource["skipOnVersionConflict"], + replace=resource["replace"], + edges=EdgeApplyList.load(resource["edges"]), + ) + + def dump(self, camel_case: bool = True) -> dict[str, Any]: + return { + "autoCreateStartNodes" if camel_case else "auto_create_start_nodes": self.auto_create_start_nodes, + "autoCreateEndNodes" if camel_case else "auto_create_end_nodes": self.auto_create_end_nodes, + "skipOnVersionConflict" if camel_case else "skip_on_version_conflict": self.skip_on_version_conflict, + "replace": self.replace, + "edges": self.edges.dump(camel_case), + } + + @dataclass class Difference: added: list[CogniteResource] @@ -125,25 +209,37 @@ class Loader(ABC, Generic[T_ID, T_Resource, T_ResourceList]): folder_name: The name of the folder in the build directory where the files are located. resource_cls: The class of the resource that is loaded. list_cls: The list version of the resource class. + dependencies: A set of loaders that must be loaded before this loader. + _display_name: The name of the resource that is used when printing messages. If this is not set the + api_name is used. """ support_drop = True support_upsert = False filetypes = frozenset({"yaml", "yml"}) + filename_pattern = "" api_name: str folder_name: str resource_cls: type[CogniteResource] list_cls: type[CogniteResourceList] identifier_key: str = "externalId" dependencies: frozenset[Loader] = frozenset() + _display_name: str = "" - def __init__(self, client: CogniteClient): + def __init__(self, client: CogniteClient, ToolGlobals: CDFToolConfig): self.client = client + self.ToolGlobals = ToolGlobals try: self.api_class = self._get_api_class(client, self.api_name) except AttributeError: raise AttributeError(f"Invalid api_name {self.api_name}.") + @property + def display_name(self): + if self._display_name: + return self._display_name + return self.api_name + @staticmethod def _get_api_class(client, api_name: str): parent = client @@ -159,7 +255,7 @@ def _get_api_class(client, api_name: str): @classmethod def create_loader(cls, ToolGlobals: CDFToolConfig): client = ToolGlobals.verify_capabilities(capability=cls.get_required_capability(ToolGlobals)) - return cls(client) + return cls(client, ToolGlobals) @classmethod @abstractmethod @@ -203,9 +299,7 @@ def remove_unchanged(self, local: T_Resource | Sequence[T_Resource]) -> T_Resour return local # Default implementations that can be overridden - def create( - self, items: Sequence[T_Resource], ToolGlobals: CDFToolConfig, drop: bool, filepath: Path - ) -> T_ResourceList: + def create(self, items: Sequence[T_Resource], drop: bool, filepath: Path) -> T_ResourceList: try: created = self.api_class.create(items) return created @@ -215,7 +309,7 @@ def create( return [] else: print(f"[bold red]ERROR:[/] Failed to create resource(s).\n{e}") - ToolGlobals.failed = True + self.ToolGlobals.failed = True return [] except CogniteDuplicatedError as e: print( @@ -224,18 +318,18 @@ def create( return [] except Exception as e: print(f"[bold red]ERROR:[/] Failed to create resource(s).\n{e}") - 
ToolGlobals.failed = True + self.ToolGlobals.failed = True return [] - def delete(self, ids: Sequence[T_ID]) -> int: + def delete(self, ids: Sequence[T_ID], drop_data: bool) -> int: self.api_class.delete(ids) return len(ids) def retrieve(self, ids: Sequence[T_ID]) -> T_ResourceList: return self.api_class.retrieve(ids) - def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> T_Resource | T_ResourceList: - raw_yaml = load_yaml_inject_variables(filepath, ToolGlobals.environment_variables()) + def load_resource(self, filepath: Path, dry_run: bool) -> T_Resource | T_ResourceList: + raw_yaml = load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables()) if isinstance(raw_yaml, list): return self.list_cls.load(raw_yaml) return self.resource_cls.load(raw_yaml) @@ -265,11 +359,12 @@ class AuthLoader(Loader[int, Group, GroupList]): def __init__( self, client: CogniteClient, + ToolGlobals: CDFToolConfig, target_scopes: Literal[ "all", "all_skipped_validation", "all_scoped_skipped_validation", "resource_scoped_only", "all_scoped_only" ] = "all", ): - super().__init__(client) + super().__init__(client, ToolGlobals) self.load = target_scopes @staticmethod @@ -289,10 +384,10 @@ def create_loader( ] = "all", ): client = ToolGlobals.verify_capabilities(capability=cls.get_required_capability(ToolGlobals)) - return cls(client, target_scopes) + return cls(client, ToolGlobals, target_scopes) @classmethod - def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability: + def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability | list[Capability]: return GroupsAcl( [GroupsAcl.Action.Read, GroupsAcl.Action.List, GroupsAcl.Action.Create, GroupsAcl.Action.Delete], GroupsAcl.Scope.All(), @@ -302,14 +397,14 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability: def get_id(cls, item: Group) -> str: return item.name - def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> Group: - raw = load_yaml_inject_variables(filepath, ToolGlobals.environment_variables()) + def load_resource(self, filepath: Path, dry_run: bool) -> Group: + raw = load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables()) for capability in raw.get("capabilities", []): for _, values in capability.items(): if len(values.get("scope", {}).get("datasetScope", {}).get("ids", [])) > 0: if not dry_run and self.load not in ["all_skipped_validation", "all_scoped_skipped_validation"]: values["scope"]["datasetScope"]["ids"] = [ - ToolGlobals.verify_dataset(ext_id) + self.ToolGlobals.verify_dataset(ext_id) for ext_id in values.get("scope", {}).get("datasetScope", {}).get("ids", []) ] else: @@ -318,7 +413,7 @@ def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: boo if len(values.get("scope", {}).get("extractionPipelineScope", {}).get("ids", [])) > 0: if not dry_run and self.load not in ["all_skipped_validation", "all_scoped_skipped_validation"]: values["scope"]["extractionPipelineScope"]["ids"] = [ - ToolGlobals.verify_extraction_pipeline(ext_id) + self.ToolGlobals.verify_extraction_pipeline(ext_id) for ext_id in values.get("scope", {}).get("extractionPipelineScope", {}).get("ids", []) ] else: @@ -330,7 +425,7 @@ def retrieve(self, ids: Sequence[int]) -> T_ResourceList: found = [g for g in remote if g.name in ids] return found - def delete(self, ids: Sequence[int]) -> int: + def delete(self, ids: Sequence[int], drop_data: bool) -> int: # Let's prevent that we delete groups we belong to 
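         # Example (hypothetical group names): delete(["gp_readers", "gp_admins"], drop_data=False)
         # skips whichever of those groups the caller's own token belongs to, so the toolkit
         # cannot lock itself out of the project.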
         try:
             groups = self.client.iam.groups.list().data
@@ -358,7 +453,7 @@ def delete(self, ids: Sequence[int]) -> int:
         self.client.iam.groups.delete(found)
         return len(found)

-    def create(self, items: Sequence[Group], ToolGlobals: CDFToolConfig, drop: bool, filepath: Path) -> GroupList:
+    def create(self, items: Sequence[Group], drop: bool, filepath: Path) -> GroupList:
         if self.load == "all":
             to_create = items
         elif self.load == "all_skipped_validation":
@@ -412,7 +507,7 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
     def get_id(self, item: DataSet) -> str:
         return item.external_id

-    def delete(self, ids: Sequence[str]) -> int:
+    def delete(self, ids: Sequence[str], drop_data: bool) -> int:
         raise NotImplementedError("CDF does not support deleting data sets.")

     def retrieve(self, ids: Sequence[str]) -> DataSetList:
@@ -429,7 +524,7 @@ def fixup_resource(local: DataSet, remote: DataSet) -> DataSet:
         local.last_updated_time = remote.last_updated_time
         return local

-    def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> DataSetList:
+    def load_resource(self, filepath: Path, dry_run: bool) -> DataSetList:
         resource = load_yaml_inject_variables(filepath, {})
         data_sets = list(resource) if isinstance(resource, dict) else resource
         for data_set in data_sets:
@@ -438,9 +533,7 @@ def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: boo
                     data_set["metadata"][key] = json.dumps(value) if isinstance(value, dict) else value
         return DataSetList.load(data_sets)

-    def create(
-        self, items: Sequence[T_Resource], ToolGlobals: CDFToolConfig, drop: bool, filepath: Path
-    ) -> T_ResourceList | None:
+    def create(self, items: Sequence[T_Resource], drop: bool, filepath: Path) -> T_ResourceList | None:
         created = DataSetList([], cognite_client=self.client)
         # There is a bug in the data set API, so only one duplicated data set is returned at the time,
         # so we need to iterate.
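Taken together, these load.py hunks change the loader calling convention: the CogniteClient and CDFToolConfig are injected once at construction, load_resource() now takes only a filepath and a dry_run flag, create() drops its ToolGlobals parameter, and delete() gains a drop_data guard. A minimal sketch of the new contract from a caller's perspective (the YAML path is illustrative, and ToolGlobals is assumed to be an already-configured CDFToolConfig):

    from pathlib import Path

    # create_loader verifies the required capabilities and injects client + ToolGlobals.
    loader = AuthLoader.create_loader(ToolGlobals, target_scopes="all_scoped_skipped_validation")
    # With dry_run=True, dataset and extraction pipeline scopes keep the placeholder id -1
    # instead of being resolved via verify_dataset / verify_extraction_pipeline.
    group = loader.load_resource(Path("build/auth/readonly.group.yaml"), dry_run=True)
    # drop_data only matters for loaders that own data (spaces, containers, nodes, edges);
    # AuthLoader.delete accepts the flag but ignores it.
    loader.create([group], drop=True, filepath=Path("build/auth/readonly.group.yaml"))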
@@ -459,7 +552,7 @@ def create(
                 items.data = []
             except Exception as e:
                 print(f"[bold red]ERROR:[/] Failed to create data sets.\n{e}")
-                ToolGlobals.failed = True
+                self.ToolGlobals.failed = True
                 return None
         if len(created) == 0:
             return None
@@ -484,7 +577,7 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
     def get_id(cls, item: RawTable) -> RawTable:
         return item

-    def delete(self, ids: Sequence[RawTable]) -> int:
+    def delete(self, ids: Sequence[RawTable], drop_data: bool) -> int:
         count = 0
         for db_name, raw_tables in itertools.groupby(sorted(ids, key=lambda x: x.db_name), key=lambda x: x.db_name):
             # Raw tables do not have ignore_unknowns_ids, so we need to catch the error
@@ -497,9 +590,7 @@ def delete(self, ids: Sequence[RawTable]) -> int:
             self.client.raw.databases.delete(name=db_name)
         return count

-    def create(
-        self, items: Sequence[RawTable], ToolGlobals: CDFToolConfig, drop: bool, filepath: Path
-    ) -> list[RawTable]:
+    def create(self, items: Sequence[RawTable], drop: bool, filepath: Path) -> list[RawTable]:
         if len(items) != 1:
             raise ValueError("Raw tables must be loaded one at a time.")
         table = items[0]
@@ -555,18 +646,18 @@ def get_id(self, item: TimeSeries) -> str:
     def retrieve(self, ids: Sequence[str]) -> TimeSeriesList:
         return self.client.time_series.retrieve_multiple(external_ids=ids, ignore_unknown_ids=True)

-    def delete(self, ids: Sequence[str]) -> int:
+    def delete(self, ids: Sequence[str], drop_data: bool) -> int:
         self.client.time_series.delete(external_id=ids, ignore_unknown_ids=True)
         return len(ids)

-    def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> TimeSeries | TimeSeriesList:
+    def load_resource(self, filepath: Path, dry_run: bool) -> TimeSeries | TimeSeriesList:
         resources = load_yaml_inject_variables(filepath, {})
         if not isinstance(resources, list):
             resources = [resources]
         for resource in resources:
             if resource.get("dataSetExternalId") is not None:
                 ds_external_id = resource.pop("dataSetExternalId")
-                resource["dataSetId"] = ToolGlobals.verify_dataset(ds_external_id) if not dry_run else -1
+                resource["dataSetId"] = self.ToolGlobals.verify_dataset(ds_external_id) if not dry_run else -1
         return TimeSeriesList.load(resources)

@@ -593,8 +684,8 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
     def get_id(self, item: Transformation) -> str:
         return item.external_id

-    def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> Transformation:
-        raw = load_yaml_inject_variables(filepath, ToolGlobals.environment_variables())
+    def load_resource(self, filepath: Path, dry_run: bool) -> Transformation:
+        raw = load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables())
         # The `authentication` key is custom for this template:
         source_oidc_credentials = raw.get("authentication", {}).get("read") or raw.get("authentication") or {}
         destination_oidc_credentials = raw.get("authentication", {}).get("write") or raw.get("authentication") or {}
@@ -615,16 +706,15 @@ def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: boo
                     f"Could not find sql file belonging to transformation {filepath.name}. Please run build again."
                )
            transformation.query = sql_file.read_text()
-        transformation.data_set_id = ToolGlobals.data_set_id
+        if isinstance(transformation.data_set_id, str):
+            transformation.data_set_id = self.ToolGlobals.verify_dataset(transformation.data_set_id)
        return transformation

-    def delete(self, ids: Sequence[str]) -> int:
+    def delete(self, ids: Sequence[str], drop_data: bool) -> int:
        self.client.transformations.delete(external_id=ids, ignore_unknown_ids=True)
        return len(ids)

-    def create(
-        self, items: Sequence[Transformation], ToolGlobals: CDFToolConfig, drop: bool, filepath: Path
-    ) -> TransformationList:
+    def create(self, items: Sequence[Transformation], drop: bool, filepath: Path) -> TransformationList:
        try:
            created = self.client.transformations.create(items)
        except CogniteDuplicatedError as e:
@@ -636,7 +726,7 @@ def create(
            return []
        except Exception as e:
            print(f"[bold red]ERROR:[/] Failed to create resource(s).\n{e}")
-            ToolGlobals.failed = True
+            self.ToolGlobals.failed = True
            return TransformationList([])
        for t in items if isinstance(items, Sequence) else [items]:
            if t.schedule.interval != "":
@@ -667,18 +757,18 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
            scope,
        )

-    def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> Path:
+    def load_resource(self, filepath: Path, dry_run: bool) -> Path:
        return filepath

    @classmethod
    def get_id(cls, item: Path) -> list[str]:
        raise NotImplementedError

-    def delete(self, ids: Sequence[str]) -> int:
+    def delete(self, ids: Sequence[str], drop_data: bool) -> int:
        # Drop all datapoints?
        raise NotImplementedError()

-    def create(self, items: Sequence[Path], ToolGlobals: CDFToolConfig, drop: bool, filepath: Path) -> TimeSeriesList:
+    def create(self, items: Sequence[Path], drop: bool, filepath: Path) -> TimeSeriesList:
        if len(items) != 1:
            raise ValueError("Datapoints must be loaded one at a time.")
        datafile = items[0]
@@ -712,7 +802,7 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
    def get_id(self, item: ExtractionPipeline) -> str:
        return item.external_id

-    def delete(self, ids: Sequence[str]) -> int:
+    def delete(self, ids: Sequence[str], drop_data: bool) -> int:
        try:
            self.client.extraction_pipelines.delete(external_id=ids)
            return len(ids)
@@ -730,16 +820,14 @@ def delete(self, ids: Sequence[str]) -> int:
                return len(ids)
        return 0

-    def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> ExtractionPipeline:
+    def load_resource(self, filepath: Path, dry_run: bool) -> ExtractionPipeline:
        resource = load_yaml_inject_variables(filepath, {})
        if resource.get("dataSetExternalId") is not None:
            ds_external_id = resource.pop("dataSetExternalId")
-            resource["dataSetId"] = ToolGlobals.verify_dataset(ds_external_id) if not dry_run else -1
+            resource["dataSetId"] = self.ToolGlobals.verify_dataset(ds_external_id) if not dry_run else -1
        return ExtractionPipeline.load(resource)

-    def create(
-        self, items: Sequence[T_Resource], ToolGlobals: CDFToolConfig, drop: bool, filepath: Path
-    ) -> T_ResourceList | None:
+    def create(self, items: Sequence[T_Resource], drop: bool, filepath: Path) -> T_ResourceList | None:
        try:
            return ExtractionPipelineList(self.client.extraction_pipelines.create(items))

@@ -754,7 +842,7 @@ def create(
                    return ExtractionPipelineList(self.client.extraction_pipelines.create(items))
        except Exception as e:
            print(f"[bold red]ERROR:[/] Failed to create extraction pipelines.\n{e}")
-            ToolGlobals.failed = True
+            self.ToolGlobals.failed = True
            return None
        return None
@@ -781,19 +869,19 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
    def get_id(cls, item: FileMetadata) -> str:
        return item.external_id

-    def delete(self, ids: Sequence[str]) -> int:
+    def delete(self, ids: Sequence[str], drop_data: bool) -> int:
        self.client.files.delete(external_id=ids)
        return len(ids)

-    def load_resource(
-        self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool
-    ) -> FileMetadata | FileMetadataList:
+    def load_resource(self, filepath: Path, dry_run: bool) -> FileMetadata | FileMetadataList:
        try:
            files = FileMetadataList(
-                [FileMetadata.load(load_yaml_inject_variables(filepath, ToolGlobals.environment_variables()))]
+                [FileMetadata.load(load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables()))]
            )
        except Exception:
-            files = FileMetadataList.load(load_yaml_inject_variables(filepath, ToolGlobals.environment_variables()))
+            files = FileMetadataList.load(
+                load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables())
+            )
        # If we have a file with exact one file config, check to see if this is a pattern to expand
        if len(files.data) == 1 and ("$FILENAME" in files.data[0].external_id or ""):
            # It is, so replace this file with all files in this folder using the same data
@@ -822,12 +910,10 @@ def load_resource(
                raise FileNotFoundError(f"Could not find file {file.name} referenced in filepath {filepath.name}")
            if isinstance(file.data_set_id, str):
                # Replace external_id with internal id
-                file.data_set_id = ToolGlobals.verify_dataset(file.data_set_id) if not dry_run else -1
+                file.data_set_id = self.ToolGlobals.verify_dataset(file.data_set_id) if not dry_run else -1
        return files

-    def create(
-        self, items: Sequence[FileMetadata], ToolGlobals: CDFToolConfig, drop: bool, filepath: Path
-    ) -> FileMetadataList:
+    def create(self, items: Sequence[FileMetadata], drop: bool, filepath: Path) -> FileMetadataList:
        created = FileMetadataList([])
        for meta in items:
            datafile = filepath.parent / meta.name
@@ -838,11 +924,239 @@ def create(
                print(f"  [bold yellow]WARNING:[/] File {meta.external_id} already exists, skipping upload.")
            except Exception as e:
                print(f"[bold red]ERROR:[/] Failed to upload file {datafile.name}.\n{e}")
-                ToolGlobals.failed = True
+                self.ToolGlobals.failed = True
                return created
        return created


+@final
+class SpaceLoader(Loader[str, SpaceApply, SpaceApplyList]):
+    api_name = "data_modeling.spaces"
+    folder_name = "data_models"
+    filename_pattern = r"^.*\.?(space)$"
+    resource_cls = SpaceApply
+    list_cls = SpaceApplyList
+
+    @classmethod
+    def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> list[Capability]:
+        return [
+            DataModelsAcl(
+                [DataModelsAcl.Action.Read, DataModelsAcl.Action.Write],
+                DataModelsAcl.Scope.All(),
+            ),
+            # Needed to delete instances
+            DataModelInstancesAcl(
+                [DataModelInstancesAcl.Action.Read, DataModelInstancesAcl.Action.Write],
+                DataModelInstancesAcl.Scope.All(),
+            ),
+        ]
+
+    @classmethod
+    def get_id(cls, item: SpaceApply) -> str:
+        return item.space
+
+    def delete(self, ids: Sequence[str], drop_data: bool) -> int:
+        if not drop_data:
+            print("  [bold]INFO:[/] Skipping deletion of spaces as drop_data flag is not set...")
+            return 0
+        print("[bold]Deleting existing data...[/]")
+        for space in ids:
+            delete_instances(
+                ToolGlobals=self.ToolGlobals,
+                space_name=space,
+            )
+
+        deleted = self.client.data_modeling.spaces.delete(ids)
+        return len(deleted)
+
+    def create(self, items: Sequence[SpaceApply], drop: bool, filepath: Path) -> T_ResourceList:
+        return self.client.data_modeling.spaces.apply(items)
+
+
+class ContainerLoader(Loader[ContainerId, ContainerApply, ContainerApplyList]):
+    api_name = "data_modeling.containers"
+    folder_name = "data_models"
+    filename_pattern = r"^.*\.?(container)$"
+    resource_cls = ContainerApply
+    list_cls = ContainerApplyList
+    dependencies = frozenset({SpaceLoader})
+
+    @classmethod
+    def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
+        # Todo Scoped to spaces
+        return DataModelsAcl(
+            [DataModelsAcl.Action.Read, DataModelsAcl.Action.Write],
+            DataModelsAcl.Scope.All(),
+        )
+
+    @classmethod
+    def get_id(cls, item: ContainerApply) -> ContainerId:
+        return item.as_id()
+
+    def delete(self, ids: Sequence[ContainerId], drop_data: bool) -> int:
+        if not drop_data:
+            print("  [bold]INFO:[/] Skipping deletion of containers as drop_data flag is not set...")
+            return 0
+        deleted = self.client.data_modeling.containers.delete(ids)
+        return len(deleted)
+
+    def create(self, items: Sequence[ContainerApply], drop: bool, filepath: Path) -> T_ResourceList:
+        self.ToolGlobals.verify_spaces(list({item.space for item in items}))
+
+        return self.client.data_modeling.containers.apply(items)
+
+
+class ViewLoader(Loader[ViewId, ViewApply, ViewApplyList]):
+    api_name = "data_modeling.views"
+    folder_name = "data_models"
+    filename_pattern = r"^.*\.?(view)$"
+    resource_cls = ViewApply
+    list_cls = ViewApplyList
+    dependencies = frozenset({SpaceLoader, ContainerLoader})
+
+    @classmethod
+    def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
+        # Todo Scoped to spaces
+        return DataModelsAcl(
+            [DataModelsAcl.Action.Read, DataModelsAcl.Action.Write],
+            DataModelsAcl.Scope.All(),
+        )
+
+    @classmethod
+    def get_id(cls, item: ViewApply) -> ViewId:
+        return item.as_id()
+
+    def create(self, items: Sequence[T_Resource], drop: bool, filepath: Path) -> T_ResourceList:
+        self.ToolGlobals.verify_spaces(list({item.space for item in items}))
+        return self.client.data_modeling.views.apply(items)
+
+
+@final
+class DataModelLoader(Loader[DataModelId, DataModelApply, DataModelApplyList]):
+    api_name = "data_modeling.data_models"
+    folder_name = "data_models"
+    filename_pattern = r"^.*\.?(datamodel)$"
+    resource_cls = DataModelApply
+    list_cls = DataModelApplyList
+    dependencies = frozenset({SpaceLoader, ViewLoader})
+
+    @classmethod
+    def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
+        # Todo Scoped to spaces
+        return DataModelsAcl(
+            [DataModelsAcl.Action.Read, DataModelsAcl.Action.Write],
+            DataModelsAcl.Scope.All(),
+        )
+
+    @classmethod
+    def get_id(cls, item: DataModelApply) -> DataModelId:
+        return item.as_id()
+
+    def create(self, items: Sequence[T_Resource], drop: bool, filepath: Path) -> T_ResourceList:
+        self.ToolGlobals.verify_spaces(list({item.space for item in items}))
+        return self.client.data_modeling.data_models.apply(items)
+
+
+@final
+class NodeLoader(Loader[list[NodeId], NodeApply, LoadableNodes]):
+    api_name = "data_modeling.instances"
+    folder_name = "data_models"
+    filename_pattern = r"^.*\.?(node)$"
+    resource_cls = NodeApply
+    list_cls = LoadableNodes
+    dependencies = frozenset({SpaceLoader, ViewLoader, ContainerLoader})
+    _display_name = "nodes"
+
+    @classmethod
+    def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
+        # Todo Scoped to spaces
+        return DataModelInstancesAcl(
+            [DataModelInstancesAcl.Action.Read, DataModelInstancesAcl.Action.Write],
+            DataModelInstancesAcl.Scope.All(),
+        )
+
+    def get_id(self, item: NodeApply) -> NodeId:
+        return item.as_id()
+
+    def load_resource(self, filepath: Path, dry_run: bool) -> LoadableNodes:
+        raw = load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables())
+        if isinstance(raw, list):
+            raise ValueError(f"Unexpected node yaml file format {filepath.name}")
+        return LoadableNodes.load(raw, cognite_client=self.client)
+
+    def delete(self, ids: Sequence[NodeId], drop_data: bool) -> int:
+        if not drop_data:
+            print("  [bold]INFO:[/] Skipping deletion of nodes as drop_data flag is not set...")
+            return 0
+        deleted = self.client.data_modeling.instances.delete(nodes=ids)
+        return len(deleted.nodes)
+
+    def create(self, items: Sequence[LoadableNodes], drop: bool, filepath: Path) -> LoadableNodes:
+        if not isinstance(items, LoadableNodes):
+            raise ValueError("Unexpected node file format")
+        self.ToolGlobals.verify_spaces(list({item.space for item in items}))
+        item = items
+        _ = self.client.data_modeling.instances.apply(
+            nodes=item.nodes,
+            auto_create_direct_relations=item.auto_create_direct_relations,
+            skip_on_version_conflict=item.skip_on_version_conflict,
+            replace=item.replace,
+        )
+        return items
+
+
+@final
+class EdgeLoader(Loader[EdgeId, EdgeApply, LoadableEdges]):
+    api_name = "data_modeling.instances"
+    folder_name = "data_models"
+    filename_pattern = r"^.*\.?(edge)$"
+    resource_cls = EdgeApply
+    list_cls = LoadableEdges
+    _display_name = "edges"
+
+    # Note edges do not need nodes to be created first, as they are created as part of the edge creation.
+    # However, for deletion (reversed order) we need to delete edges before nodes.
+    dependencies = frozenset({SpaceLoader, ViewLoader, NodeLoader})
+
+    @classmethod
+    def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
+        # Todo Scoped to spaces
+        return DataModelInstancesAcl(
+            [DataModelInstancesAcl.Action.Read, DataModelInstancesAcl.Action.Write],
+            DataModelInstancesAcl.Scope.All(),
+        )
+
+    def get_id(self, item: EdgeApply) -> EdgeId:
+        return item.as_id()
+
+    def load_resource(self, filepath: Path, dry_run: bool) -> LoadableEdges:
+        raw = load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables())
+        if isinstance(raw, list):
+            raise ValueError(f"Unexpected edge yaml file format {filepath.name}")
+        return LoadableEdges.load(raw, cognite_client=self.client)
+
+    def delete(self, ids: Sequence[EdgeId], drop_data: bool) -> int:
+        if not drop_data:
+            print("  [bold]INFO:[/] Skipping deletion of edges as drop_data flag is not set...")
+            return 0
+        deleted = self.client.data_modeling.instances.delete(edges=ids)
+        return len(deleted.edges)
+
+    def create(self, items: Sequence[LoadableEdges], drop: bool, filepath: Path) -> LoadableEdges:
+        if not isinstance(items, LoadableEdges):
+            raise ValueError("Unexpected edge file format")
+        self.ToolGlobals.verify_spaces(list({item.space for item in items}))
+        item = items
+        _ = self.client.data_modeling.instances.apply(
+            edges=item.edges,
+            auto_create_start_nodes=item.auto_create_start_nodes,
+            auto_create_end_nodes=item.auto_create_end_nodes,
+            skip_on_version_conflict=item.skip_on_version_conflict,
+            replace=item.replace,
+        )
+        return items
+
+
 def drop_load_resources(
     loader: Loader,
     path: Path,
@@ -851,6 +1165,7 @@ def drop_load_resources(
     clean: bool = False,
     load: bool = True,
     dry_run: bool = False,
+    drop_data: bool = False,
     verbose: bool = False,
 ):
     if path.is_file():
@@ -862,18 +1177,26 @@ def drop_load_resources(
     else:
         filepaths = [file for file in path.glob("**/*")]

-    items = [loader.load_resource(f, ToolGlobals, dry_run) for f in filepaths]
+    if loader.filename_pattern:
+        # This is used by data modeling resources to filter out files that are not of the correct type
+        # as these resources share the same folder.
+        pattern = re.compile(loader.filename_pattern)
+        filepaths = [file for file in filepaths if pattern.match(file.stem)]
+
+    items = [loader.load_resource(f, dry_run) for f in filepaths]
     nr_of_batches = len(items)
     nr_of_items = sum(len(item) if isinstance(item, Sized) else 1 for item in items)
+    if nr_of_items == 0:
+        return
     nr_of_deleted = 0
     nr_of_created = 0
     if load:
-        print(f"[bold]Uploading {nr_of_items} {loader.api_name} in {nr_of_batches} batches to CDF...[/]")
+        print(f"[bold]Uploading {nr_of_items} {loader.display_name} in {nr_of_batches} batches to CDF...[/]")
     else:
-        print(f"[bold]Cleaning {nr_of_items} {loader.api_name} in {nr_of_batches} batches to CDF...[/]")
+        print(f"[bold]Cleaning {nr_of_items} {loader.display_name} in {nr_of_batches} batches to CDF...[/]")
     batches = [item if isinstance(item, Sized) else [item] for item in items]
     if drop and loader.support_drop and load:
-        print(f"  --drop is specified, will delete existing {loader.api_name} before uploading.")
+        print(f"  --drop is specified, will delete existing {loader.display_name} before uploading.")
     if (drop and loader.support_drop) or clean:
         for batch in batches:
             drop_items: list = []
@@ -884,18 +1207,20 @@ def drop_load_resources(
                 drop_items.append(loader.get_id(item))
             if not dry_run:
                 try:
-                    nr_of_deleted += loader.delete(drop_items)
+                    nr_of_deleted += loader.delete(drop_items, drop_data)
                     if verbose:
-                        print(f"  Deleted {len(drop_items)} {loader.api_name}.")
+                        print(f"  Deleted {len(drop_items)} {loader.display_name}.")
                 except CogniteAPIError as e:
                     if e.code == 404:
-                        print(f"  [bold yellow]WARNING:[/] {len(drop_items)} {loader.api_name} do(es) not exist.")
+                        print(f"  [bold yellow]WARNING:[/] {len(drop_items)} {loader.display_name} do(es) not exist.")
                 except CogniteNotFoundError:
-                    print(f"  [bold yellow]WARNING:[/] {len(drop_items)} {loader.api_name} do(es) not exist.")
+                    print(f"  [bold yellow]WARNING:[/] {len(drop_items)} {loader.display_name} do(es) not exist.")
                 except Exception as e:
-                    print(f"  [bold yellow]WARNING:[/] Failed to delete {len(drop_items)} {loader.api_name}. Error {e}")
+                    print(
+                        f"  [bold yellow]WARNING:[/] Failed to delete {len(drop_items)} {loader.display_name}. Error {e}"
+                    )
             else:
-                print(f"  Would have deleted {len(drop_items)} {loader.api_name}.")
+                print(f"  Would have deleted {len(drop_items)} {loader.display_name}.")
     if not load:
         return
     try:
@@ -903,415 +1228,31 @@ def drop_load_resources(
         for batch, filepath in zip(batches, filepaths):
             if not drop and loader.support_upsert:
                 if verbose:
-                    print(f"  Comparing {len(batch)} {loader.api_name} from {filepath}...")
+                    print(f"  Comparing {len(batch)} {loader.display_name} from {filepath}...")
                 batch = loader.remove_unchanged(batch)
                 if verbose:
-                    print(f"  {len(batch)} {loader.api_name} to be deployed...")
+                    print(f"  {len(batch)} {loader.display_name} to be deployed...")
             if len(batch) > 0:
-                created = loader.create(batch, ToolGlobals, drop, filepath)
+                created = loader.create(batch, drop, filepath)
                 nr_of_created += len(created) if created is not None else 0
                 if isinstance(loader, AuthLoader):
                     nr_of_deleted += len(created)
     except Exception as e:
-        print(f"[bold red]ERROR:[/] Failed to upload {loader.api_name}.")
+        print(f"[bold red]ERROR:[/] Failed to upload {loader.display_name}.")
         print(e)
         ToolGlobals.failed = True
         return
-    print(f"  Deleted {nr_of_deleted} out of {nr_of_items} {loader.api_name} from {len(filepaths)} config files.")
-    print(f"  Created {nr_of_created} out of {nr_of_items} {loader.api_name} from {len(filepaths)} config files.")
-
-
-LOADER_BY_FOLDER_NAME = {loader.folder_name: loader for loader in Loader.__subclasses__()}
-
-
-def load_datamodel_graphql(
-    ToolGlobals: CDFToolConfig,
-    space_name: str | None = None,
-    model_name: str | None = None,
-    directory=None,
-) -> None:
-    """Load a graphql datamodel from file."""
-    if space_name is None or model_name is None or directory is None:
-        raise ValueError("space_name, model_name, and directory must be supplied.")
-    with open(f"{directory}/datamodel.graphql") as file:
-        # Read directly into a string.
-        datamodel = file.read()
-    # Clear any delete errors
-    ToolGlobals.failed = False
-    client = ToolGlobals.verify_client(
-        capabilities={
-            "dataModelsAcl": ["READ", "WRITE"],
-            "dataModelInstancesAcl": ["READ", "WRITE"],
-        }
-    )
-    print(f"[bold]Loading data model {model_name} into space {space_name} from {directory}...[/]")
-    try:
-        client.data_modeling.graphql.apply_dml(
-            (space_name, model_name, "1"),
-            dml=datamodel,
-            name=model_name,
-            description=f"Data model for {model_name}",
-        )
-    except Exception as e:
-        print(f"[bold red]ERROR:[/] Failed to write data model {model_name} to space {space_name}.")
-        print(e)
-        ToolGlobals.failed = True
-        return
-    print(f"  Created data model {model_name}.")
-
-
-def load_datamodel(
-    ToolGlobals: CDFToolConfig,
-    drop: bool = False,
-    drop_data: bool = False,
-    delete_removed: bool = True,
-    delete_containers: bool = False,
-    delete_spaces: bool = False,
-    directory: Path | None = None,
-    dry_run: bool = False,
-    only_drop: bool = False,
-) -> None:
-    """Load containers, views, spaces, and data models from a directory
-
-    Note that this function will never delete instances, but will delete all
-    the properties found in containers if delete_containers is specified.
-    delete_spaces will fail unless also the edges and nodes have been deleted,
-    e.g. using the clean_out_datamodel() function.
-
-    Note that if delete_spaces flag is True, an attempt will be made to delete the space,
-    but if it fails, the loading will continue. If delete_containers is True, the loading
-    will abort if deletion fails.
-    Args:
-        drop: Whether to drop all existing data model entities (default: apply just the diff).
-        drop_data: Whether to drop all instances (nodes and edges) in all spaces.
-        delete_removed: Whether to delete (previous) resources that are not in the directory.
-        delete_containers: Whether to delete containers including data in the instances.
-        delete_spaces: Whether to delete spaces (requires containers and instances to be deleted).
-        directory: Directory to load from.
-        dry_run: Whether to perform a dry run and only print out what will happen.
-        only_drop: Whether to only drop existing resources and not load new ones.
-    """
-    if directory is None:
-        raise ValueError("directory must be supplied.")
-    if (delete_containers or delete_spaces) and not drop:
-        raise ValueError("drop must be True if delete_containers or delete_spaces is True.")
-    if (delete_spaces or delete_containers) and not drop_data:
-        raise ValueError("drop_data must be True if delete_spaces or delete_containers is True.")
-    model_files_by_type: dict[str, list[Path]] = defaultdict(list)
-    models_pattern = re.compile(r"^.*\.?(space|container|view|datamodel)\.yaml$")
-    for file in directory.rglob("*.yaml"):
-        if not (match := models_pattern.match(file.name)):
-            continue
-        model_files_by_type[match.group(1)].append(file)
-    print("[bold]Loading data model files from build directory...[/]")
-    for type_, files in model_files_by_type.items():
-        model_files_by_type[type_].sort()
-        print(f"  {len(files)} of type {type_}s in {directory}")
-
-    cognite_resources_by_type: dict[str, list[ContainerApply | ViewApply | DataModelApply | SpaceApply]] = defaultdict(
-        list
-    )
-    for type_, files in model_files_by_type.items():
-        resource_cls = {
-            "space": SpaceApply,
-            "container": ContainerApply,
-            "view": ViewApply,
-            "datamodel": DataModelApply,
-        }[type_]
-        for file in files:
-            cognite_resources_by_type[type_].append(
-                resource_cls.load(load_yaml_inject_variables(file, ToolGlobals.environment_variables()))
-            )
-    # Remove duplicates
-    for type_ in list(cognite_resources_by_type):
-        unique = {r.as_id(): r for r in cognite_resources_by_type[type_]}
-        cognite_resources_by_type[type_] = list(unique.values())
-
-    explicit_space_list = [s.space for s in cognite_resources_by_type["space"]]
-    space_list = list({r.space for _, resources in cognite_resources_by_type.items() for r in resources})
-
-    implicit_spaces = [SpaceApply(space=s, name=s, description="Imported space") for s in space_list]
-    for s in implicit_spaces:
-        if s.space not in [s2.space for s2 in cognite_resources_by_type["space"]]:
-            print(
-                f"  [bold red]ERROR[/] Space {s.name} is implicitly defined and may need it's own {s.name}.space.yaml file."
-            )
-            cognite_resources_by_type["space"].append(s)
-    # Clear any delete errors
-    ToolGlobals.failed = False
-    client = ToolGlobals.verify_client(
-        capabilities={
-            "dataModelsAcl": ["READ", "WRITE"],
-            "dataModelInstancesAcl": ["READ", "WRITE"],
-        }
-    )
-
-    existing_resources_by_type: dict[str, list[ContainerApply | ViewApply | DataModelApply | SpaceApply]] = defaultdict(
-        list
-    )
-    resource_api_by_type = {
-        "container": client.data_modeling.containers,
-        "view": client.data_modeling.views,
-        "datamodel": client.data_modeling.data_models,
-        "space": client.data_modeling.spaces,
-    }
-    for type_, resources in cognite_resources_by_type.items():
-        attempts = 5
-        while attempts > 0:
-            try:
-                existing_resources_by_type[type_] = (
-                    resource_api_by_type[type_].retrieve(list({r.as_id() for r in resources})).as_apply()
-                )
-                attempts = 0
-            except CogniteAPIError as e:
-                attempts -= 1
-                if e.code == 500 and attempts > 0:
-                    continue
-                print(f"[bold]ERROR:[/] Failed to retrieve {type_}(s):\n{e}")
-                ToolGlobals.failed = True
-                return
-            except Exception as e:
-                print(f"[bold]ERROR:[/] Failed to retrieve {type_}(s):\n{e}")
-                ToolGlobals.failed = True
-                return
-
-    differences: dict[str, Difference] = {}
-    for type_, resources in cognite_resources_by_type.items():
-        new_by_id = {r.as_id(): r for r in resources}
-        existing_by_id = {r.as_id(): r for r in existing_resources_by_type[type_]}
-
-        added = [r for r in resources if r.as_id() not in existing_by_id]
-        removed = [r for r in existing_resources_by_type[type_] if r.as_id() not in new_by_id]
-
-        changed = []
-        unchanged = []
-        # Due to a bug in the SDK, we need to ensure that the new properties of the container
-        # has set the default values as these will be set for the existing container and
-        # the comparison will fail.
-        for existing_id in set(new_by_id.keys()) & set(existing_by_id.keys()):
-            new = new_by_id[existing_id]
-            existing = existing_by_id[existing_id]
-            if isinstance(new, ContainerApply):
-                for p, _ in existing.properties.items():
-                    new.properties[p] = ContainerProperty(
-                        type=new.properties[p].type,
-                        nullable=new.properties[p].nullable or True,
-                        auto_increment=new.properties[p].auto_increment or False,
-                        default_value=new.properties[p].default_value or None,
-                        description=new.properties[p].description or None,
-                    )
-
-            if new_by_id[existing_id] == existing_by_id[existing_id]:
-                unchanged.append(new_by_id[existing_id])
-            else:
-                changed.append(new_by_id[existing_id])
-
-        differences[type_] = Difference(added, removed, changed, unchanged)
-
-    creation_order = ["space", "container", "view", "datamodel"]
-
-    if drop_data:
-        print("[bold]Deleting existing data...[/]")
-        deleted = 0
-        for i in explicit_space_list:
-            if not dry_run:
-                delete_instances(
-                    ToolGlobals,
-                    space_name=i,
-                    dry_run=dry_run,
-                )
-                if ToolGlobals.failed:
-                    print(f"  [bold]ERROR:[/] Failed to delete instances in space {i}.")
-                    return
-            else:
-                print(f"  Would have deleted instances in space {i}.")
-
-    if drop:
-        print("[bold]Deleting existing configurations...[/]")
-        # Clean out all old resources
-        for type_ in reversed(creation_order):
-            items = cognite_resources_by_type.get(type_)
-            if items is None:
-                continue
-            if type_ == "container" and not delete_containers:
-                print("  [bold]INFO:[/] Skipping deletion of containers as delete_containers flag is not set...")
-                continue
-            if type_ == "space" and not delete_spaces:
-                print("  [bold]INFO:[/] Skipping deletion of spaces as delete_spaces flag is not set...")
-                continue
-            deleted = 0
-            if not dry_run:
-                if type_ == "space":
-                    for i2 in items:
-                        # Only delete spaces that have been explicitly defined
-                        if i2.space in explicit_space_list:
-                            try:
-                                ret = resource_api_by_type["space"].delete(i2.space)
-                            except Exception:
-                                ToolGlobals.failed = False
-                                print(f"  [bold]INFO:[/] Deletion of space {i2.space} was not successful, continuing.")
-                                continue
-                            if len(ret) > 0:
-                                deleted += 1
-                else:
-                    try:
-                        ret = resource_api_by_type[type_].delete([i.as_id() for i in items])
-                    except CogniteAPIError as e:
-                        # Typically spaces can not be deleted if there are other
-                        # resources in the space.
-                        print(f"  [bold]ERROR:[/] Failed to delete {type_}(s):\n{e}")
-                        return
-                    deleted += len(ret)
-                print(f"  Deleted {deleted} {type_}(s).")
-            else:
-                print(f"  Would have deleted {deleted} {type_}(s).")
-
-    if not only_drop:
-        print("[bold]Creating new configurations...[/]")
-        for type_ in creation_order:
-            if type_ not in differences:
-                continue
-            items = differences[type_]
-            if items.added:
-                print(f"  {len(items.added)} added {type_}(s) to be deployed...")
-                if dry_run:
-                    continue
-                attempts = 5
-                while attempts > 0:
-                    try:
-                        resource_api_by_type[type_].apply(items.added)
-                        attempts = 0
-                    except Exception as e:
-                        attempts -= 1
-                        if attempts > 0:
-                            continue
-                        print(f"[bold]ERROR:[/] Failed to create {type_}(s):\n{e}")
-                        ToolGlobals.failed = True
-                        return
-                print(f"  Created {len(items.added)} {type_}(s).")
-            elif items.changed:
-                print(f"  {len(items.changed)} changed {type_}(s) to be deployed...")
-                if dry_run:
-                    continue
-                attempts = 5
-                while attempts > 0:
-                    try:
-                        resource_api_by_type[type_].apply(items.changed)
-                        attempts = 0
-                    except Exception as e:
-                        attempts -= 1
-                        if attempts > 0:
-                            continue
-                        print(f"[bold]ERROR:[/] Failed to create {type_}(s):\n{e}")
-                        ToolGlobals.failed = True
-                        return
-                if drop:
-                    print(
-                        f"  Created {len(items.changed)} {type_}s that could have been updated instead (--drop specified)."
-                    )
-                else:
-                    print(f"  Updated {len(items.changed)} {type_}(s).")
-            elif items.unchanged:
-                print(f"  {len(items.unchanged)} unchanged {type_}(s).")
-                if drop:
-                    attempts = 5
-                    while attempts > 0:
-                        try:
-                            resource_api_by_type[type_].apply(items.unchanged)
-                            attempts = 0
-                        except Exception as e:
-                            attempts -= 1
-                            if attempts > 0:
-                                continue
-                            print(f"[bold]ERROR:[/] Failed to create {type_}(s):\n{e}")
-                            ToolGlobals.failed = True
-                            return
-                    print(
-                        f"  Created {len(items.unchanged)} unchanged {type_}(s) that could have been skipped (--drop specified)."
-                    )
-
-    if delete_removed and not drop:
-        for type_ in reversed(creation_order):
-            if type_ not in differences:
-                continue
-            items = differences[type_]
-            if items.removed:
-                if dry_run:
-                    print(f"  Would have deleted {len(items.removed)} {type_}(s).")
-                    continue
-                try:
-                    resource_api_by_type[type_].delete(items.removed)
-                except CogniteAPIError as e:
-                    # Typically spaces can not be deleted if there are other
-                    # resources in the space.
-                    print(f"[bold]ERROR:[/] Failed to delete {len(items.removed)} {type_}(s).")
-                    print(e)
-                    ToolGlobals.failed = True
-                    continue
-                print(f"  Deleted {len(items.removed)} {type_}(s) that were removed.")
-
-
-def load_nodes(
-    ToolGlobals: CDFToolConfig,
-    directory: Path | None = None,
-    dry_run: bool = False,
-) -> None:
-    """Insert nodes"""
-
-    for file in directory.rglob("*.node.yaml"):
-        if file.name == "config.yaml":
-            continue
-
-        client: CogniteClient = ToolGlobals.verify_client(
-            capabilities={
-                "dataModelsAcl": ["READ"],
-                "dataModelInstancesAcl": ["READ", "WRITE"],
-            }
-        )
-        nodes: dict = load_yaml_inject_variables(file, ToolGlobals.environment_variables())
-
-        try:
-            view = ViewId(
-                space=nodes["view"]["space"],
-                external_id=nodes["view"]["externalId"],
-                version=nodes["view"]["version"],
-            )
-        except KeyError:
-            raise KeyError(
-                f"Expected view configuration not found in {file}:\nview:\n  space: \n  externalId: \n  version: "
-            )
-        try:
-            node_space: str = nodes["destination"]["space"]
-        except KeyError:
-            raise KeyError(
-                f"Expected destination space configuration in {file}:\ndestination:\n  space: "
-            )
-        node_list: NodeApplyList = []
-        try:
-            for n in nodes.get("nodes", []):
-                node_list.append(
-                    NodeApply(
-                        space=node_space,
-                        external_id=n.pop("externalId"),
-                        existing_version=n.pop("existingVersion", None),
-                        sources=[NodeOrEdgeData(source=view, properties=n)],
-                    )
-                )
-        except Exception as e:
-            raise KeyError(f"Failed to parse node {n} in {file}:\n{e}")
-        print(f"[bold]Loading {len(node_list)} node(s) from {directory}...[/]")
-        if not dry_run:
-            try:
-                client.data_modeling.instances.apply(
-                    nodes=node_list,
-                    auto_create_direct_relations=nodes.get("autoCreateDirectRelations", True),
-                    skip_on_version_conflict=nodes.get("skipOnVersionConflict", False),
-                    replace=nodes.get("replace", False),
-                )
-                print(f"  Created {len(node_list)} node(s) in {node_space}.")
-            except CogniteAPIError as e:
-                print(f"[bold]ERROR:[/] Failed to create {len(node_list)} node(s) in {node_space}:\n{e}")
-                ToolGlobals.failed = True
-                return
+    if nr_of_deleted != 0:
+        print(
+            f"  Deleted {nr_of_deleted} out of {nr_of_items} {loader.display_name} from {len(filepaths)} config files."
+        )
+    print(f"  Created {nr_of_created} out of {nr_of_items} {loader.display_name} from {len(filepaths)} config files.")


+LOADER_BY_FOLDER_NAME: dict[str, list[type[Loader]]] = {}
+for loader in Loader.__subclasses__():
+    if loader.folder_name not in LOADER_BY_FOLDER_NAME:
+        LOADER_BY_FOLDER_NAME[loader.folder_name] = []
+    LOADER_BY_FOLDER_NAME[loader.folder_name].append(loader)
+del loader  # cleanup module namespace
diff --git a/cognite_toolkit/cdf_tk/templates.py b/cognite_toolkit/cdf_tk/templates.py
index 59131c5a7..3bda5e70a 100644
--- a/cognite_toolkit/cdf_tk/templates.py
+++ b/cognite_toolkit/cdf_tk/templates.py
@@ -166,7 +166,15 @@ def check_yaml_semantics(parsed: Any, filepath_src: Path, filepath_build: Path,
        ext_id = parsed.get("space")
        ext_id_type = "space"
    elif resource_type == "data_models" and ".node." in filepath_src.name:
-        ext_id = parsed.get("view", {}).get("externalId") or parsed.get("view", {}).get("external_id")
+        try:
+            ext_ids = {source["source"]["externalId"] for node in parsed["nodes"] for source in node["sources"]}
+        except KeyError:
+            print(f"  [bold red]ERROR:[/] Node file {filepath_src} has an invalid data format.")
+            exit(1)
+        if len(ext_ids) != 1:
+            print(f"  [bold red]ERROR:[/] All nodes in {filepath_src} must have the same view.")
+            exit(1)
+        ext_id = ext_ids.pop()
        ext_id_type = "view.externalId"
    elif resource_type == "auth":
        ext_id = parsed.get("name")
@@ -375,13 +383,23 @@ def process_config_files(
                        f"  [bold red]ERROR:[/] YAML validation error for {file_name} after substituting config variables: \n{e}"
                    )
                    exit(1)
-                if not check_yaml_semantics(
-                    parsed=parsed,
-                    filepath_src=orig_file,
-                    filepath_build=filepath,
-                ):
-                    exit(1)
+
+                if isinstance(parsed, dict):
+                    parsed = [parsed]
+                for item in parsed:
+                    if not check_yaml_semantics(
+                        parsed=item,
+                        filepath_src=orig_file,
+                        filepath_build=filepath,
+                    ):
+                        exit(1)
                loader = LOADER_BY_FOLDER_NAME.get(filepath.parent.name)
+                if loader and len(loader) == 1:
+                    loader = loader[0]
+                elif loader:
+                    loader = next(
+                        (candidate for candidate in loader if re.match(candidate.filename_pattern, filepath.stem)), None
+                    )
                if loader:
                    load_warnings = validate_case_raw(
                        parsed, loader.resource_cls, filepath, identifier_key=loader.identifier_key
diff --git a/cognite_toolkit/cdf_tk/utils.py b/cognite_toolkit/cdf_tk/utils.py
index 1c415d6c9..8d5c8645a 100644
--- a/cognite_toolkit/cdf_tk/utils.py
+++ b/cognite_toolkit/cdf_tk/utils.py
@@ -64,6 +64,7 @@ def __init__(
        self._failed = False
        self._environ = {}
        self._data_set_id_by_external_id: dict[str, id] = {}
+        self._existing_spaces: set[str] = set()
        self.oauth_credentials = OAuthClientCredentials(
            token_url="",
            client_id="",
@@ -339,7 +340,7 @@ def verify_extraction_pipeline(self, external_id: str) -> int:
        try:
            pipeline = self.client.extraction_pipelines.retrieve(external_id=external_id)
        except CogniteAPIError as e:
-            raise CogniteAuthError("Don't have correct access rights. Need READ on datasetsAcl.") from e
+            raise CogniteAuthError("Don't have correct access rights. Need READ on extractionPipelinesAcl.") from e

        if pipeline is not None:
            return pipeline.id
@@ -347,6 +348,34 @@ def verify_extraction_pipeline(self, external_id: str) -> int:
            f"Extraction pipeline {external_id} does not exist, you need to create it first. Do this by adding a config file to the extraction_pipelines folder."
        )

+    def verify_spaces(self, space: str | list[str]) -> list[str]:
+        """Verify that the configured space(s) exist and are accessible
+
+        Args:
+            space (str | list[str]): External id(s) of the space(s) to verify
+
+        Returns:
+            spaces (list[str])
+        Re-raises underlying SDK exception
+        """
+        if isinstance(space, str):
+            space = [space]
+        if all([s in self._existing_spaces for s in space]):
+            return space
+
+        self.verify_client(capabilities={"dataModelsAcl": ["READ"]})
+        try:
+            existing = self.client.data_modeling.spaces.retrieve(space)
+        except CogniteAPIError as e:
+            raise CogniteAuthError("Don't have correct access rights. Need READ on dataModelsAcl.") from e
+
+        if missing := set(space) - set(existing.as_ids()):
+            raise ValueError(
+                f"Space {missing} does not exist, you need to create it first. Do this by adding a config file to the data model folder."
+ ) + self._existing_spaces.update([space.space for space in existing]) + return [space.space for space in existing] + def load_yaml_inject_variables(filepath: Path, variables: dict[str, str]) -> dict[str, Any] | list[dict[str, Any]]: content = filepath.read_text() diff --git a/cognite_toolkit/experimental/cdf_asset_source_model/data_models/2.ModelSpace.space.yaml b/cognite_toolkit/experimental/cdf_asset_source_model/data_models/2.ModelSpace.space.yaml new file mode 100644 index 000000000..bf2c2d7fa --- /dev/null +++ b/cognite_toolkit/experimental/cdf_asset_source_model/data_models/2.ModelSpace.space.yaml @@ -0,0 +1,3 @@ +space: {{model_space}} +name: {{model_space}} +description: Space for the model in the ExtendedSourceData data model diff --git a/cognite_toolkit/experimental/example_pump_data_model/data_models/1.InstanceSpace.space.yaml b/cognite_toolkit/experimental/example_pump_data_model/data_models/1.InstanceSpace.space.yaml deleted file mode 100644 index b99f7679d..000000000 --- a/cognite_toolkit/experimental/example_pump_data_model/data_models/1.InstanceSpace.space.yaml +++ /dev/null @@ -1,3 +0,0 @@ -space: {{instance_space}} -name: {{instance_space}} -description: Space for the instances. diff --git a/cognite_toolkit/experimental/example_pump_data_model/data_models/1.spaces.space.yaml b/cognite_toolkit/experimental/example_pump_data_model/data_models/1.spaces.space.yaml new file mode 100644 index 000000000..abf4dc0b5 --- /dev/null +++ b/cognite_toolkit/experimental/example_pump_data_model/data_models/1.spaces.space.yaml @@ -0,0 +1,6 @@ +- space: {{instance_space}} + name: {{instance_space}} + description: Space for the instances. +- space: {{model_space}} + name: {{model_space}} + description: Space for the Pump Model. diff --git a/cognite_toolkit/modules/cdf_infield_location/data_models/infield_apm_app_config.node.yaml b/cognite_toolkit/modules/cdf_infield_location/data_models/infield_apm_app_config.node.yaml index b5f3ee079..c8b81c35b 100644 --- a/cognite_toolkit/modules/cdf_infield_location/data_models/infield_apm_app_config.node.yaml +++ b/cognite_toolkit/modules/cdf_infield_location/data_models/infield_apm_app_config.node.yaml @@ -1,23 +1,25 @@ autoCreateDirectRelations: True skipOnVersionConflict: False replace: True -view: - space: APM_Config - externalId: APM_Config - version: '1' -destination: - space: APM_Config nodes: - - customerDataSpaceId: APM_SourceData - customerDataSpaceVersion: '1' - name: Default location + - space: {{apm_config_instance_space}} externalId: default_infield_config_minimal - featureConfiguration: - rootLocationConfigurations: - - assetExternalId: {{root_asset_external_id}} - appDataInstanceSpace: sp_infield_{{default_location}}_app_data - sourceDataInstanceSpace: sp_asset_{{default_location}}_source - templateAdmins: - - gp_infield_{{default_location}}_template_admins - checklistAdmins: - - gp_infield_{{default_location}}_checklist_admins + sources: + - source: + space: APM_Config + externalId: APM_Config + version: '1' + type: view + properties: + featureConfiguration: + rootLocationConfigurations: + - assetExternalId: {{root_asset_external_id}} + appDataInstanceSpace: sp_infield_{{default_location}}_app_data + sourceDataInstanceSpace: sp_asset_{{default_location}}_source + templateAdmins: + - gp_infield_{{default_location}}_template_admins + checklistAdmins: + - gp_infield_{{default_location}}_checklist_admins + customerDataSpaceId: APM_SourceData + customerDataSpaceVersion: '1' + name: Default location diff --git 
a/cognite_toolkit/modules/cdf_infield_location/default.config.yaml b/cognite_toolkit/modules/cdf_infield_location/default.config.yaml index 188e27202..ebf70de50 100644 --- a/cognite_toolkit/modules/cdf_infield_location/default.config.yaml +++ b/cognite_toolkit/modules/cdf_infield_location/default.config.yaml @@ -6,6 +6,7 @@ default_location: oid module_version: '1' apm_datamodel_space: 'APM_SourceData' apm_app_config_external_id: 'default-infield-config-minimal' +apm_config_instance_space: 'APM_Config' # RAW databases to load workorders and other workorder data from # The below values point to the RAW database in the cdf_oid_example_data and should be # changed if you want to load workorders from another RAW database. diff --git a/tests/conftest.py b/tests/conftest.py index c98c9475f..7caebaa9c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -53,6 +53,7 @@ ViewApplyList, ViewList, ) +from cognite.client.data_classes.data_modeling.ids import EdgeId, InstanceId, NodeId from cognite.client.testing import monkeypatch_cognite_client TEST_FOLDER = Path(__file__).resolve().parent @@ -304,7 +305,7 @@ def delete_core( def delete_data_modeling(ids: VersionedDataModelingId | Sequence[VersionedDataModelingId]) -> list: deleted = [] - if isinstance(ids, VersionedDataModelingId): + if isinstance(ids, (VersionedDataModelingId, InstanceId)): deleted.append(ids.dump(camel_case=True)) elif isinstance(ids, Sequence): deleted.extend([id.dump(camel_case=True) for id in ids]) @@ -312,6 +313,38 @@ def delete_data_modeling(ids: VersionedDataModelingId | Sequence[VersionedDataMo deleted_resources[resource_cls.__name__].extend(deleted) return deleted + def delete_instances( + nodes: NodeId | Sequence[NodeId] | tuple[str, str] | Sequence[tuple[str, str]] | None = None, + edges: EdgeId | Sequence[EdgeId] | tuple[str, str] | Sequence[tuple[str, str]] | None = None, + ) -> list: + deleted = [] + if isinstance(nodes, NodeId): + deleted.append(nodes.dump(camel_case=True, include_instance_type=True)) + elif isinstance(nodes, tuple): + deleted.append(NodeId(*nodes).dump(camel_case=True, include_instance_type=True)) + elif isinstance(edges, EdgeId): + deleted.append(edges.dump(camel_case=True, include_instance_type=True)) + elif isinstance(edges, tuple): + deleted.append(EdgeId(*edges).dump(camel_case=True, include_instance_type=True)) + elif isinstance(nodes, Sequence): + deleted.extend( + [ + node.dump(camel_case=True, include_instance_type=True) if isinstance(node, NodeId) else node + for node in nodes + ] + ) + elif isinstance(edges, Sequence): + deleted.extend( + [ + edge.dump(camel_case=True, include_instance_type=True) if isinstance(edge, EdgeId) else edge + for edge in edges + ] + ) + + if deleted: + deleted_resources[resource_cls.__name__].extend(deleted) + return deleted + def delete_space(spaces: str | Sequence[str]) -> list: deleted = [] if isinstance(spaces, str): @@ -348,6 +381,8 @@ def delete_raw(db_name: str, name: str | Sequence[str]) -> list: signature = inspect.signature(api_client.delete) if "ids" in signature.parameters: mock.delete = delete_data_modeling + elif "nodes" in signature.parameters: + mock.delete = delete_instances elif "spaces" in signature.parameters: mock.delete = delete_space elif "db_name" in signature.parameters: diff --git a/tests/test_approval_modules.py b/tests/test_approval_modules.py index 0b949125c..b1cea9d4f 100644 --- a/tests/test_approval_modules.py +++ b/tests/test_approval_modules.py @@ -129,7 +129,6 @@ def test_deploy_module_approval( build_env="test", 
interactive=False, drop=True, - drop_data=True, dry_run=False, include=[], ) diff --git a/tests/test_approval_modules_snapshots/cdf_apm_base.yaml b/tests/test_approval_modules_snapshots/cdf_apm_base.yaml index 76d92e131..bb6863979 100644 --- a/tests/test_approval_modules_snapshots/cdf_apm_base.yaml +++ b/tests/test_approval_modules_snapshots/cdf_apm_base.yaml @@ -617,19 +617,6 @@ View: space: APM_SourceData version: '1' deleted: - Container: - - externalId: APM_Activity - space: APM_SourceData - type: container - - externalId: APM_Config - space: APM_Config - type: container - - externalId: APM_Notification - space: APM_SourceData - type: container - - externalId: APM_Operation - space: APM_SourceData - type: container DataModel: - externalId: APM_Config space: APM_Config @@ -639,9 +626,6 @@ deleted: space: APM_SourceData type: datamodel version: '1' - Space: - - APM_Config - - APM_SourceData View: - externalId: APM_Activity space: APM_SourceData diff --git a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml index d3ac9064e..dff37bfe0 100644 --- a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml @@ -1018,23 +1018,11 @@ View: space: apm_simple version: '1' deleted: - Container: - - externalId: Asset - space: apm_simple - type: container - - externalId: WorkItem - space: apm_simple - type: container - - externalId: WorkOrder - space: apm_simple - type: container DataModel: - externalId: apm_simple space: apm_simple type: datamodel version: '1' - Space: - - apm_simple Transformation: - externalId: tr_asset_oid_workmate_apm_simple_load_asset2children - externalId: tr_asset_oid_workmate_apm_simple_load_assets diff --git a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml index ed2eadac8..6304233dc 100644 --- a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml @@ -23,7 +23,7 @@ DataModel: type: view version: '1' Space: -- description: Imported space +- description: Space for the model in the ExtendedSourceData data model name: ExtendedSourceDataModels space: ExtendedSourceDataModels - description: Space for the instances in the ExtendedSourceData data model @@ -110,17 +110,11 @@ View: space: ExtendedSourceDataModels version: '1' deleted: - Container: - - externalId: Asset - space: ExtendedSourceDataModels - type: container DataModel: - externalId: ExtendedSourceData space: ExtendedSourceDataModels type: datamodel version: '1' - Space: - - cdfTemplateInstances Transformation: - externalId: sync-asset_hierarchy_cdf_asset_source_model View: diff --git a/tests/test_approval_modules_snapshots/cdf_infield_common.yaml b/tests/test_approval_modules_snapshots/cdf_infield_common.yaml index d6b9a6422..8bb21cd38 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_common.yaml +++ b/tests/test_approval_modules_snapshots/cdf_infield_common.yaml @@ -24,6 +24,3 @@ Space: - description: Space for Infield App Data name: cognite_app_data space: cognite_app_data -deleted: - Space: - - cognite_app_data diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml index 67c328ea3..7a6c719ed 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml +++ 
b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml @@ -495,9 +495,6 @@ TransformationSchedule: interval: 0 * * * * isPaused: false deleted: - Space: - - sp_asset_oid_source - - sp_infield_oid_app_data Transformation: - externalId: tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm diff --git a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml index a0cc2e07d..35ac3435f 100644 --- a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml @@ -61,7 +61,7 @@ Space: - description: Space for the instances. name: pumpInstanceSpace space: pumpInstanceSpace -- description: Imported space +- description: Space for the Pump Model. name: pumpModelSpace space: pumpModelSpace Transformation: @@ -243,17 +243,11 @@ View: space: pumpModelSpace version: '1' deleted: - Container: - - externalId: Pump - space: pumpModelSpace - type: container DataModel: - externalId: PumpLiftStations space: pumpModelSpace type: datamodel version: '1' - Space: - - pumpInstanceSpace Transformation: - externalId: pump_model-populate-lift_station_pumps_edges - externalId: pump_model-populate-pump_container diff --git a/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml index 62a4531ca..233483fe0 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml @@ -9,6 +9,7 @@ deleted: type: datamodel version: '1' Space: + - ExtendedSourceDataModels - cdfTemplateInstances Transformation: - externalId: sync-asset_hierarchy_cdf_asset_source_model diff --git a/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml index 76432dfca..34e165331 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml @@ -1,4 +1,8 @@ deleted: + Node: + - externalId: default_infield_config_minimal + instanceType: node + space: APM_Config Space: - sp_asset_oid_source - sp_infield_oid_app_data diff --git a/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml b/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml index c852f1fe2..cec1332f1 100644 --- a/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml +++ b/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml @@ -10,6 +10,7 @@ deleted: version: '1' Space: - pumpInstanceSpace + - pumpModelSpace Transformation: - externalId: pump_model-populate-lift_station_pumps_edges - externalId: pump_model-populate-pump_container diff --git a/tests/test_cdf_tk/load_data/datamodel_graphql/datamodel.graphql b/tests/test_cdf_tk/load_data/datamodel_graphql/datamodel.graphql deleted file mode 100644 index 4121b8e10..000000000 --- a/tests/test_cdf_tk/load_data/datamodel_graphql/datamodel.graphql +++ /dev/null @@ -1,4 +0,0 @@ -type MyType { - myField: String - myOtherField: Int -} diff --git a/tests/test_cdf_tk/test_load.py b/tests/test_cdf_tk/test_load.py index 974e90bb8..b8039d6d5 100644 --- a/tests/test_cdf_tk/test_load.py +++ b/tests/test_cdf_tk/test_load.py @@ -1,5 +1,4 @@ from pathlib import 
Path
-from typing import Callable
 from unittest.mock import MagicMock
 
 import pytest
@@ -12,7 +11,6 @@
     FileLoader,
     Loader,
     drop_load_resources,
-    load_datamodel_graphql,
 )
 from cognite_toolkit.cdf_tk.utils import CDFToolConfig
 
@@ -22,29 +20,6 @@
 SNAPSHOTS_DIR = THIS_FOLDER / "load_data_snapshots"
 
 
-@pytest.mark.parametrize(
-    "load_function, directory, extra_args",
-    [
-        (
-            load_datamodel_graphql,
-            DATA_FOLDER / "datamodel_graphql",
-            dict(space_name="test_space", model_name="test_model"),
-        ),
-    ],
-)
-def test_loader_function(
-    load_function: Callable, directory: Path, extra_args: dict, cognite_client_approval: CogniteClient, data_regression
-):
-    cdf_tool = MagicMock(spec=CDFToolConfig)
-    cdf_tool.verify_client.return_value = cognite_client_approval
-    cdf_tool.data_set_id = 999
-
-    load_function(ToolGlobals=cdf_tool, directory=directory, **extra_args)
-
-    dump = cognite_client_approval.dump()
-    data_regression.check(dump, fullpath=SNAPSHOTS_DIR / f"{directory.name}.yaml")
-
-
 @pytest.mark.parametrize(
     "loader_cls, directory",
     [
@@ -72,7 +47,7 @@ def test_upsert_data_set(cognite_client_approval: CogniteClient):
     cdf_tool.verify_capabilities.return_value = cognite_client_approval
 
     loader = DataSetsLoader.create_loader(cdf_tool)
-    loaded = loader.load_resource(DATA_FOLDER / "data_sets" / "1.my_datasets.yaml", cdf_tool, dry_run=False)
+    loaded = loader.load_resource(DATA_FOLDER / "data_sets" / "1.my_datasets.yaml", dry_run=False)
 
     assert len(loaded) == 2
     first = DataSet.load(loaded[0].dump())

From 329ac62ea75d7bce149d60e2dea004b22dca7b05 Mon Sep 17 00:00:00 2001
From: Anders Albert <60234212+doctrino@users.noreply.github.com>
Date: Wed, 6 Dec 2023 10:45:53 +0100
Subject: [PATCH 07/90] Dry Run Job (#199)

* ci: dry run job

* docs: Improved doc string
---
 .github/workflows/build.yml | 39 +++++++++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index c84389b26..b58616bb9 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -40,3 +40,42 @@ jobs:
         poetry install
     - name: Run pytest
       run: pytest tests
+  dry-run-demo:
+    runs-on: ubuntu-latest
+    environment: dev
+    env:
+      CDF_CLUSTER: ${{ secrets.CDF_CLUSTER }}
+      CDF_PROJECT: ${{ secrets.CDF_PROJECT }}
+      IDP_CLIENT_ID: ${{ secrets.IDP_CLIENT_ID }}
+      IDP_CLIENT_SECRET: ${{ secrets.IDP_CLIENT_SECRET }}
+      IDP_TOKEN_URL: ${{ secrets.IDP_TOKEN_URL }}
+    name: Dry Run Demo
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v4
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+      - uses: snok/install-poetry@v1
+        with:
+          version: 1.6.1
+          virtualenvs-create: false
+      - name: Install dependencies and build the package
+        run: |
+          poetry install
+          poetry build
+      - name: Install cdf-tk
+        run: pip install .
+      - name: Initialize project
+        run: cdf-tk init demo_project
+      - name: "Pre-processing for demo environment"
+        run: ./demo/preproc.sh
+      - name: "Build the templates"
+        run: cdf-tk build --build-dir=./build --env=demo ./demo_project
+      - name: "Verify and create access rights"
+        run: cdf-tk auth verify
+      - name: "Test clean --dry-run"
+        run: |
+          cdf-tk clean --env=demo ./build --dry-run
+      - name: "Deploy the templates --dry-run"
+        run: |
+          cdf-tk deploy --drop --env=demo ./build --dry-run

From 8330f848dc41d826a2dd0fb06f84fa9ffbd01f86 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?=
Date: Wed, 6 Dec 2023 11:02:43 +0100
Subject: [PATCH 08/90] Fixed bug with single dataset

---
 cognite_toolkit/cdf_tk/load.py         |  4 +++-
 .../data_sets/dataset.yaml             | 12 +++++++++---
 2 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py
index b52e03bc5..55f3d0a34 100644
--- a/cognite_toolkit/cdf_tk/load.py
+++ b/cognite_toolkit/cdf_tk/load.py
@@ -431,7 +431,9 @@ def fixup_resource(local: DataSet, remote: DataSet) -> DataSet:
 
     def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> DataSetList:
         resource = load_yaml_inject_variables(filepath, {})
-        data_sets = list(resource) if isinstance(resource, dict) else resource
+
+        data_sets = [resource] if isinstance(resource, dict) else resource
+
         for data_set in data_sets:
             if data_set.get("metadata"):
                 for key, value in data_set["metadata"].items():
diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml
index 4dfe4fb50..04befa426 100644
--- a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml
+++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml
@@ -2,6 +2,12 @@ externalId: 'ds_asset_{{location_name}}'
 name: 'asset:{{location_name}}'
 description: 'Asset data for {{location_name}}'
 metadata:
-  consoleSource: '{"names": ["{{source_name}}"]}'
-  rawTables: '[{"databaseName": "asset_{{location_name}}_{{source_name}}", "tableName": "assets"}]'
-  transformations: '[{"externalId": "tr_asset_{{location_name}}_{{source_name}}_asset_hierarchy", "type":"jetfire"}]'
\ No newline at end of file
+  consoleSource:
+    names:
+      - "{{source_name}}"
+  rawTables:
+    - databaseName: asset_{{location_name}}_{{source_name}}
+      tableName: "assets"
+  transformations:
+    - externalId: tr_asset_{{location_name}}_{{source_name}}_asset_hierarchy
+      type: "jetfire"

From c7d34912998cc7766a533fce4b4d8ccdc636e9b3 Mon Sep 17 00:00:00 2001
From: tesande
Date: Thu, 7 Dec 2023 12:44:06 +0100
Subject: [PATCH 09/90] Added groups capability to avoid an error message when
 the location config is missing in Infield

---
 .../auth/infield_checklist_admin_role.group.yaml | 6 ++++++
 .../auth/infield_normal_role.group.yaml          | 6 ++++++
 .../auth/infield_template_admin_role.group.yaml  | 6 ++++++
 .../auth/infield_viewer_role.group.yaml          | 6 ++++++
 4 files changed, 24 insertions(+)

diff --git a/cognite_toolkit/modules/cdf_infield_location/auth/infield_checklist_admin_role.group.yaml b/cognite_toolkit/modules/cdf_infield_location/auth/infield_checklist_admin_role.group.yaml
index 5fe4ab09a..b6c3f35d9 100644
--- a/cognite_toolkit/modules/cdf_infield_location/auth/infield_checklist_admin_role.group.yaml
+++ b/cognite_toolkit/modules/cdf_infield_location/auth/infield_checklist_admin_role.group.yaml
@@ -6,6 +6,12 @@ metadata:
   origin: 'cdf-project-templates'
  module_version: '{{module_version}}'
 capabilities:
+  - groupsAcl:
+      actions:
+        - LIST
+        - READ
+      scope:
+        currentuserscope: {}
   - threedAcl:
       actions:
         - READ
diff --git a/cognite_toolkit/modules/cdf_infield_location/auth/infield_normal_role.group.yaml b/cognite_toolkit/modules/cdf_infield_location/auth/infield_normal_role.group.yaml
index 3022ce153..a58a184d8 100644
--- a/cognite_toolkit/modules/cdf_infield_location/auth/infield_normal_role.group.yaml
+++ b/cognite_toolkit/modules/cdf_infield_location/auth/infield_normal_role.group.yaml
@@ -6,6 +6,12 @@ metadata:
   origin: 'cdf-project-templates'
   module_version: '{{module_version}}'
 capabilities:
+  - groupsAcl:
+      actions:
+        - LIST
+        - READ
+      scope:
+        currentuserscope: {}
   - threedAcl:
       actions:
         - READ
diff --git a/cognite_toolkit/modules/cdf_infield_location/auth/infield_template_admin_role.group.yaml b/cognite_toolkit/modules/cdf_infield_location/auth/infield_template_admin_role.group.yaml
index 5caa76c1d..966036372 100644
--- a/cognite_toolkit/modules/cdf_infield_location/auth/infield_template_admin_role.group.yaml
+++ b/cognite_toolkit/modules/cdf_infield_location/auth/infield_template_admin_role.group.yaml
@@ -6,6 +6,12 @@ metadata:
   origin: 'cdf-project-templates'
   module_version: '{{module_version}}'
 capabilities:
+  - groupsAcl:
+      actions:
+        - LIST
+        - READ
+      scope:
+        currentuserscope: {}
   - threedAcl:
       actions:
         - READ
diff --git a/cognite_toolkit/modules/cdf_infield_location/auth/infield_viewer_role.group.yaml b/cognite_toolkit/modules/cdf_infield_location/auth/infield_viewer_role.group.yaml
index 87ef1e57f..3ac8d2031 100644
--- a/cognite_toolkit/modules/cdf_infield_location/auth/infield_viewer_role.group.yaml
+++ b/cognite_toolkit/modules/cdf_infield_location/auth/infield_viewer_role.group.yaml
@@ -6,6 +6,12 @@ metadata:
   origin: 'cdf-project-templates'
   module_version: '{{module_version}}'
 capabilities:
+  - groupsAcl:
+      actions:
+        - LIST
+        - READ
+      scope:
+        currentuserscope: {}
   - threedAcl:
       actions:
         - READ

From abdb6d675292b092447ebfe277176fed2e37ed04 Mon Sep 17 00:00:00 2001
From: Jan Inge Bergseth <31886431+BergsethCognite@users.noreply.github.com>
Date: Thu, 7 Dec 2023 13:03:20 +0100
Subject: [PATCH 10/90] Fixed config to follow the naming standard

---
 .../auth/asset.extractor.groups.yaml                        | 2 +-
 .../auth/asset.processing.groups.yaml                       | 2 +-
 .../cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml  | 2 +-
 .../extraction_pipelines/source_asset_valhall_workmate.yaml | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml
index 55b4510df..579b8043d 100644
--- a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml
+++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml
@@ -11,7 +11,7 @@ capabilities:
         - WRITE
       scope:
         tableScope: {
-          dbsToTables: {'asset-{{location_name}}-{{source_name}}':{} }
+          dbsToTables: {'asset_{{location_name}}_{{source_name}}':{} }
         }
   - extractionConfigsAcl:
       actions:
diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml
index 9dacb4f9e..904040c42 100644
--- a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml
+++
b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml @@ -11,7 +11,7 @@ capabilities: - WRITE scope: tableScope: { - dbsToTables: {'asset-{{location_name}}-{{source_name}}':{} } + dbsToTables: {'asset_{{location_name}}_{{source_name}}':{} } } - transformationsAcl: actions: diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml index 04befa426..d2ffbaffb 100644 --- a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml +++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml @@ -1,4 +1,4 @@ -externalId: 'ds_asset_{{location_name}}' +externalId: 'ds_asset_{{location_name}}' name: 'asset:{{location_name}}' description: 'Asset data for {{location_name}}' metadata: diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml index f8363a1dc..2e75ede3a 100644 --- a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml +++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml @@ -4,7 +4,7 @@ name: 'src:asset:{{location_name}}:{{source_name}}' dataSetExternalId: 'ds_asset_{{location_name}}' description: 'Asset source extraction pipeline with configuration for DB extractor reading data from {{location_name}}:{{source_name}}' rawTables: - - dbName: 'asset-{{location_name}}-{{source_name}}' + - dbName: 'asset_{{location_name}}_{{source_name}}' tableName: 'assets' source: '{{source_name}}' documentation: "The DB Extractor is a general database extractor that connects to a database, executes one or several queries and sends the result to CDF RAW.\n\nThe extractor connects to a database over ODBC, which means that you need an ODBC driver for your database. If you are running the Docker version of the extractor, ODBC drivers for MySQL, MS SQL, PostgreSql and Oracle DB are preinstalled in the image. See the example config for details on connection strings for these. If you are running the Windows exe version of the extractor, you must provide an ODBC driver yourself. 
These are typically provided by the database vendor.\n\nFurther documentation is available [here](./docs/documentation.md)\n\nFor information on development, consider the following guides:\n\n * [Development guide](guides/development.md)\n * [Release guide](guides/release.md)" \ No newline at end of file From bdaeadbb34709e9c967a396f073ccea7745b4353 Mon Sep 17 00:00:00 2001 From: Jan Inge Bergseth <31886431+BergsethCognite@users.noreply.github.com> Date: Thu, 7 Dec 2023 14:16:11 +0100 Subject: [PATCH 11/90] Update default.config.yaml --- .../cdf_data_pipeline_asset_valhall/default.config.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/default.config.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/default.config.yaml index 73198c351..1cbad60dc 100644 --- a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/default.config.yaml +++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/default.config.yaml @@ -12,13 +12,13 @@ module_version: '1' source_name: workmate asset_dataset: ds_asset_oid -asset_raw_input_db: asset-oid-workmate +asset_raw_input_db: asset_oid_workmate asset_raw_input_table: assets # source ID from Azure AD for the corresponding groups, ex 'c74797ce-9191-4a4a-9186-8fe21c54c3de' -asset_location_extractor_group_source_id: c74797ce-c49b-4ada-9186-8fe21c54c3de -asset_location_processing_group_source_id: c74797ce-c49b-4ada-9186-8fe21c54c3de -asset_location_read_group_source_id: c74797ce-c49b-4ada-9186-8fe21c54c3de +asset_location_extractor_group_source_id: +asset_location_processing_group_source_id: +asset_location_read_group_source_id: # Transformation credentials From 110aec5d09133d58fb28d20282fbdde762eedb28 Mon Sep 17 00:00:00 2001 From: Greger Wedel Date: Fri, 8 Dec 2023 11:56:02 +0100 Subject: [PATCH 12/90] Remove naming docs --- README.md | 148 ------------------------------------------------------ 1 file changed, 148 deletions(-) diff --git a/README.md b/README.md index 94fd1b0c8..64f601fb1 100644 --- a/README.md +++ b/README.md @@ -20,154 +20,6 @@ The templates and the `cdf-tk` tool are currently in ALPHA. The scope in alpha i Performance Management focused on Infield (Digital Operator Rounds). The templates and tooling will be continously improved throughout moving towards beta and general availability. -Below is an overview of the scope of what can be governed through using these templates: - -![Overview of project templates](./static/overview.png "Overview") - -## Quickstart - -To install the `cdf-tk` tool, you need a working Python installation >=3.9 (recommended 3.11). -Run: `pip install cognite-toolkit` - -The `cdf-tk` tool is available as a command line tool. Run `cdf-tk --help` to see the available commands. - -## Naming standards - -A common structure on naming different CDF resource types and configuration are important from day one. Easy to use and understandable naming standard makes it easy to navigate and search on all components in a project as is grows in data sources, code, configuration, supported solutions and human resources working with and using the CDF solutions. 
- -**Separation tokens** - -* For external IDs the separation token is **’_’** (underscore) - this token works for all OS, when external ID matches usage of files -* For names the separation token is **’:’ or '_'** (colon or underscore) - these tokens matches usage in other CLI tools ang gives good readability - -### Example usage of naming standard - -In the example below we are setting up a project based on the Open Industry Data (OID), that originates from the Valhall oil rig. Hence the example location below is *oid* - -* the **location_name** = oid -* The different data sources are: - * workmate (asset & workorder data) - * fileshare (files and 3D) - * PI (time series / data points) - -```txt -CDF project -│ -├── Data Sets: -│   ├── extId: ds_asset_oid ── name: asset:oid -│   │ ├── Extraction Pipelines: -│   │ │ └── extId: ep_src_asset_oid_workmate ── name: src:asset:oid:workmate -│ │ │ -│   │ ├── RAW DB/tables: -│   │ │ └── DB: asset_oid_workmate ── table: assets -│ │ │ -│   │ ├── Transformations: -│   │ │ └── extId: tr_asset_oid_workmate_asset_hierarchy ── name: asset:oid:workmate:asset_hierarchy -│ │ │ -│   │ └── Autorisation groups: -│   │ ├── id: asset:oid:extractor -│   │ ├── id: asset:oid:prosessing -│   │ └── id: asset:oid:read -│ │  -│   ├── extId: ds_files_oid ── name: files:oid -│   │ ├── Extraction Pipelines: -│   │ │ ├── extId: ep_src_files_oid_fileshare ── name: src:files:oid:fileshare -│   │ │ └── extId: ep_ctx_files_oid_fileshare:annotation ── name: ctx:files:oid:fileshare:annotation -│ │ │ -│   │ ├── RAW DB/tables: -│   │ │ └── DB: files_oid_fileshare ── table: file_metadata -│ │ │ -│   │ ├── Transformations: -│   │ │ └── extId: tr_file_oid_fileshare_file_metadata ── name: file:oid:metadata:fileshare:file_metadata -│ │ │ -│   │ ├── Functions: -│   │ │ └── extId: fu_files_oid_fileshare_annotation ── name: files:oid:fileshare:annotation -│ │ │ -│   │ └── Autorisation groups: -│   │ ├── id: files:oid:extractor -│   │ ├── id: files:oid:prosessing -│   │ └── id: files:oid:read -│ │  -│   ├── extId: ds_workorder_oid ── name: workorder:oid -│   │ ├── ... -│ │  ... -│ │ -│   ├── extId: ds_timeseries_oid ── name: timeseries:oid -│   │ ├── ... -│ │ ...  -│ │ -│   ├── extId: ds_3d_oid ── name: 3d:oid -│   │ ├── ... -│ │ ...  -│  -└── Spaces: -   └── extId: sp_apm_oid ── name: oid -``` - -### Naming elements - -* **Data Type:** asset, timeseries, workorder, files, 3d,... (use what is relevant for project) -* **Source:** Source system where data originates from (ex, SAP, Workmate, Aveva, PI, Fileshare, SharePoint,..) -* **Location:** Location for Asset / System / Plant / installation -* **Pipeline Type:** src = source data, ctx = contextualization, uc = use case, ... -* **Operation Type:** Type of operation/action/functionality in transformation or CDF function -* **Access Type:** Type of access used in authorization groups (ex: extractor, processing, read, ...) 
- -**Data sets:** - -```txt -External ID: ds__ -Name: : -Ex: ds_asset_oid / asset:oid -``` - -**Extraction Pipelines:** - -```txt -External ID: ep____ -Name: :::: -Ex: ep_src_asset_oid_workmate / src:asset:oid:workmate -``` - -**RAW DB/tables:** - -```txt -DB: __ -Ex: asset_oid_workmate -Table: use name from source, or other describing name -``` - -**Transformations:** - -```txt -External ID: tr____ -Name: ::: -Ex: tr_asset_oid_workmate_asset_hierarchy / asset:oid:workmate:asset_hierarchy -``` - -**Functions:** - -```txt -External ID: fu____ -Name: ::: -Ex: fu_files_oid_fileshare_annotation / files:oid:fileshare:annotation -``` - -**Authorization groups:** - -```txt -Name: :: -Ex: asset:valhall:extractor / asset:valhall:processing / asset:valhall:read -``` - -**Data Model Spaces:** - -```txt -External ID: dm__ -Name: : -Ex: dm_apm_oid / apm:oid -``` - ## For more information More details about the tool can be found at From ee93d652d2a2152d82630cd134ca0d0a28c294a1 Mon Sep 17 00:00:00 2001 From: Greger Wedel Date: Fri, 8 Dec 2023 11:56:24 +0100 Subject: [PATCH 13/90] Get rid of empty idp module --- cognite_toolkit/common/cdf_idp_default/README.md | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 cognite_toolkit/common/cdf_idp_default/README.md diff --git a/cognite_toolkit/common/cdf_idp_default/README.md b/cognite_toolkit/common/cdf_idp_default/README.md deleted file mode 100644 index 11054790b..000000000 --- a/cognite_toolkit/common/cdf_idp_default/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Module: cdf_idf_default - -This module configures a default identity provider for a project. It is meant to be used as part of -project implementation. This module should be swapped out for a module that configures the -specific identity provider for the project. 
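The loader patches below keep re-applying one pattern whenever a resource file references a data set by external ID: the reference is only validated against CDF when not doing a dry run. A minimal sketch of that shared guard, assuming a `verify_dataset` callable like the helper on `CDFToolConfig` (the function name `resolve_data_set` is illustrative; the `-1` placeholder id is taken from the diffs):

```python
from typing import Any, Callable


def resolve_data_set(
    resource: dict[str, Any], verify_dataset: Callable[[str], int], dry_run: bool
) -> dict[str, Any]:
    # Swap the YAML-facing dataSetExternalId for the internal dataSetId the API expects.
    if resource.get("dataSetExternalId") is not None:
        ds_external_id = resource.pop("dataSetExternalId")
        # On --dry-run, skip the lookup against CDF and use the placeholder id -1,
        # so a deploy preview does not fail on data sets that do not exist yet.
        resource["dataSetId"] = verify_dataset(ds_external_id) if not dry_run else -1
    return resource
```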
From 5d0b469c4e694579aeaa78a78a9929e09c7a9265 Mon Sep 17 00:00:00 2001 From: Greger Wedel Date: Fri, 8 Dec 2023 11:56:33 +0100 Subject: [PATCH 14/90] Fix wrong package config --- cognite_toolkit/default.packages.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cognite_toolkit/default.packages.yaml b/cognite_toolkit/default.packages.yaml index 593ca9922..f1ea43933 100644 --- a/cognite_toolkit/default.packages.yaml +++ b/cognite_toolkit/default.packages.yaml @@ -13,11 +13,11 @@ packages: - cdf_apm_simple_data_model cdf_demo_infield: - cdf_auth_readwrite_all + - cdf_oid_example_data - cdf_apm_base - cdf_infield_common - cdf_infield_location cdf_infield: - - cdf_idp_default - cdf_auth_readwrite_all - cdf_apm_base - cdf_infield_common From a7e7bfaa70e8af2cdaa78b940c5bcd6ad5384856 Mon Sep 17 00:00:00 2001 From: Greger Wedel Date: Fri, 8 Dec 2023 11:57:13 +0100 Subject: [PATCH 15/90] Use dataSetExternalId in files --- cognite_toolkit/cdf_tk/load.py | 8 +++++--- .../examples/cdf_oid_example_data/files/files.yaml | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 0ff88840a..944a297a5 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -875,9 +875,11 @@ def delete(self, ids: Sequence[str], drop_data: bool) -> int: def load_resource(self, filepath: Path, dry_run: bool) -> FileMetadata | FileMetadataList: try: - files = FileMetadataList( - [FileMetadata.load(load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables()))] - ) + resource = load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables()) + if resource.get("dataSetExternalId") is not None: + ds_external_id = resource.pop("dataSetExternalId") + resource["dataSetId"] = self.ToolGlobals.verify_dataset(ds_external_id) if not dry_run else -1 + files = FileMetadataList([FileMetadata.load(resource)]) except Exception: files = FileMetadataList.load( load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables()) diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/files.yaml b/cognite_toolkit/examples/cdf_oid_example_data/files/files.yaml index 955d3a7fd..8aea2ca8a 100644 --- a/cognite_toolkit/examples/cdf_oid_example_data/files/files.yaml +++ b/cognite_toolkit/examples/cdf_oid_example_data/files/files.yaml @@ -1,3 +1,3 @@ - externalId: {{source_files}}_$FILENAME - dataSetId: ds_files_{{default_location}} + dataSetExternalId: ds_files_{{default_location}} source: {{source_files}} From 3751fe6bd61e33f33eb34117c7dddc3fcc77206e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 8 Dec 2023 12:52:19 +0100 Subject: [PATCH 16/90] Pipeline config ready for testing --- cognite_toolkit/cdf_tk/load.py | 52 +++++++++++++++++-- .../source_asset_valhall_workmate.config} | 2 +- .../source_asset_valhall_workmate.yaml | 3 ++ .../source_asset_valhall_workmate.config.yaml | 39 ++++++++++++++ 4 files changed, 90 insertions(+), 6 deletions(-) rename cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/{extraction_pipelines/source_asset_valhall_workmate_config.json => extraction_pipeline_configs/source_asset_valhall_workmate.config} (95%) create mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.yaml create mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml diff --git 
a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py
index 55f3d0a34..191f0657d 100644
--- a/cognite_toolkit/cdf_tk/load.py
+++ b/cognite_toolkit/cdf_tk/load.py
@@ -31,6 +31,7 @@
     DataSet,
     DataSetList,
     ExtractionPipeline,
+    ExtractionPipelineConfig,
     ExtractionPipelineList,
     FileMetadata,
     FileMetadataList,
@@ -437,7 +438,7 @@ def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: boo
         for data_set in data_sets:
             if data_set.get("metadata"):
                 for key, value in data_set["metadata"].items():
-                    data_set["metadata"][key] = json.dumps(value) if isinstance(value, dict) else value
+                    data_set["metadata"][key] = json.dumps(value)
         return DataSetList.load(data_sets)
 
     def create(
@@ -733,18 +734,23 @@ def delete(self, ids: Sequence[str]) -> int:
         return 0
 
     def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> ExtractionPipeline:
+        if filepath.name.endswith(".config.yaml"):
+            return None
+
         resource = load_yaml_inject_variables(filepath, {})
         if resource.get("dataSetExternalId") is not None:
             ds_external_id = resource.pop("dataSetExternalId")
             resource["dataSetId"] = ToolGlobals.verify_dataset(ds_external_id) if not dry_run else -1
+
         return ExtractionPipeline.load(resource)
 
     def create(
         self, items: Sequence[T_Resource], ToolGlobals: CDFToolConfig, drop: bool, filepath: Path
     ) -> T_ResourceList | None:
-        try:
-            return ExtractionPipelineList(self.client.extraction_pipelines.create(items))
+        extractionPipelineList: ExtractionPipelineList = None
+
+        try:
+            extractionPipelineList = ExtractionPipelineList(self.client.extraction_pipelines.create(items))
         except CogniteDuplicatedError as e:
             if len(e.duplicated) < len(items):
                 for dup in e.duplicated:
@@ -753,12 +759,47 @@ def create(
                     if item.external_id == ext_id:
                         items.remove(item)
                 try:
-                    return ExtractionPipelineList(self.client.extraction_pipelines.create(items))
+                    extractionPipelineList = ExtractionPipelineList(self.client.extraction_pipelines.create(items))
                 except Exception as e:
                     print(f"[bold red]ERROR:[/] Failed to create extraction pipelines.\n{e}")
                     ToolGlobals.failed = True
                     return None
-        return None
+
+        file_name = filepath.stem.split(".", 2)[1]
+        config_file_name = f"{file_name}.config.yaml"
+        config_file = next(
+            (
+                file
+                for file in Path(filepath.parent).iterdir()
+                if file.is_file() and file.name.endswith(config_file_name)
+            ),
+            None,
+        )
+
+        if config_file is None:
+            print(
+                f"  [bold yellow]WARNING:[/] no config file for extraction pipeline found.
Expected to find {config_file_name} in same folder as {file_name}" + ) + return extractionPipelineList + + resources = load_yaml_inject_variables(config_file, {}) + resources = [resources] if isinstance(resources, dict) else resources + + for resource in resources: + extractionPipelineConfig = ExtractionPipelineConfig.load( + { + "externalId": resource.get("externalId"), + "description": resource.get("description"), + "config": json.dumps(resource.get("config", ""), indent=4), + } + ) + try: + self.client.extraction_pipelines.config.create(extractionPipelineConfig) + except Exception as e: + print(f"[bold red]ERROR:[/] Failed to create extraction pipeline config.\n{e}") + ToolGlobals.failed = True + + return extractionPipelineList @final @@ -865,6 +906,7 @@ def drop_load_resources( filepaths = [file for file in path.glob("**/*")] items = [loader.load_resource(f, ToolGlobals, dry_run) for f in filepaths] + items = [item for item in items if item is not None] nr_of_batches = len(items) nr_of_items = sum(len(item) if isinstance(item, Sized) else 1 for item in items) nr_of_deleted = 0 diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate_config.json b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.config similarity index 95% rename from cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate_config.json rename to cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.config index fecae60a6..de0eef79a 100644 --- a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate_config.json +++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.config @@ -32,4 +32,4 @@ queries: type: raw database: "db-extractor" table: "postgres" - primary-key: "{id}" + primary-key: "{id}" \ No newline at end of file diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.yaml new file mode 100644 index 000000000..865a3cf1c --- /dev/null +++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.yaml @@ -0,0 +1,3 @@ +--- +externalId: 'ep_src_asset_{{location_name}}_{{source_name}}' +description: 'Asset source extraction pipeline with configuration for DB extractor reading data from {{location_name}}:{{source_name}}' \ No newline at end of file diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml new file mode 100644 index 000000000..58b055fd9 --- /dev/null +++ b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml @@ -0,0 +1,39 @@ +--- +externalId: 'ep_src_asset_{{location_name}}_{{source_name}}' +description: 'DB extractor config reading data from {{location_name}}:{{source_name}}' +config: + logger: + console: + level: INFO + file: + level: INFO + path: "file.log" + # List of databases + databases: + - type: odbc + name: postgres + 
connection-string: "DSN={MyPostgresDsn}" + # List of queries + queries: + - name: test-postgres + database: postgres + query: > + SELECT + + * + FROM + + mytable + WHERE + + {incremental_field} >= '{start_at}' + ORDER BY + + {incremental_field} ASC + incremental-field: "id" + initial-start: 0 + destination: + type: raw + database: "db-extractor" + table: "postgres" + primary-key: "{id}" From 37a8a4f33d777a74d4857b23c4f828ccb587dbad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 8 Dec 2023 13:03:21 +0100 Subject: [PATCH 17/90] savepoint --- .../source_asset_valhall_workmate.config | 35 ------------------- .../source_asset_valhall_workmate.yaml | 3 -- cognite_toolkit/local.yaml | 8 ++--- 3 files changed, 2 insertions(+), 44 deletions(-) delete mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.config delete mode 100644 cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.yaml diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.config b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.config deleted file mode 100644 index de0eef79a..000000000 --- a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.config +++ /dev/null @@ -1,35 +0,0 @@ -logger: - console: - level: INFO - file: - level: INFO - path: "file.log" -# List of databases -databases: - - type: odbc - name: postgres - connection-string: "DSN={MyPostgresDsn}" -# List of queries -queries: - - name: test-postgres - database: postgres - query: > - SELECT - - * - FROM - - mytable - WHERE - - {incremental_field} >= '{start_at}' - ORDER BY - - {incremental_field} ASC - incremental-field: "id" - initial-start: 0 - destination: - type: raw - database: "db-extractor" - table: "postgres" - primary-key: "{id}" \ No newline at end of file diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.yaml b/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.yaml deleted file mode 100644 index 865a3cf1c..000000000 --- a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipeline_configs/source_asset_valhall_workmate.yaml +++ /dev/null @@ -1,3 +0,0 @@ ---- -externalId: 'ep_src_asset_{{location_name}}_{{source_name}}' -description: 'Asset source extraction pipeline with configuration for DB extractor reading data from {{location_name}}:{{source_name}}' \ No newline at end of file diff --git a/cognite_toolkit/local.yaml b/cognite_toolkit/local.yaml index 58dbd3a29..e3fd5e7c6 100644 --- a/cognite_toolkit/local.yaml +++ b/cognite_toolkit/local.yaml @@ -24,14 +24,10 @@ demo: - cdf_demo_infield - cdf_oid_example_data local: - project: -dev + project: trial-572dca111144a5196a6b1 type: dev deploy: - - cdf_auth_readwrite_all - - cdf_apm_base - - cdf_oid_example_data - - cdf_infield_common - - cdf_infield_location + - cdf_data_pipeline_asset_valhall dev: project: -dev type: dev From 0de140a9cf89f3cafdaa390cf4cdd83bcec66d18 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 8 Dec 2023 14:10:54 +0100 Subject: [PATCH 18/90] removed type hint --- cognite_toolkit/cdf_tk/load.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 1e591d057..177efa715 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -835,7 +835,7 @@ def load_resource(self, filepath: Path, dry_run: bool) -> ExtractionPipeline: return ExtractionPipeline.load(resource) def create(self, items: Sequence[T_Resource], drop: bool, filepath: Path) -> T_ResourceList | None: - extractionPipelineList: ExtractionPipelineList = None + extractionPipelineList = None try: extractionPipelineList = ExtractionPipelineList(self.client.extraction_pipelines.create(items)) From cd58e6011d3d790eaff85859bf0df7741c47b963 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 8 Dec 2023 14:14:03 +0100 Subject: [PATCH 19/90] reverting local.yaml --- cognite_toolkit/local.yaml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/cognite_toolkit/local.yaml b/cognite_toolkit/local.yaml index e3fd5e7c6..58dbd3a29 100644 --- a/cognite_toolkit/local.yaml +++ b/cognite_toolkit/local.yaml @@ -24,10 +24,14 @@ demo: - cdf_demo_infield - cdf_oid_example_data local: - project: trial-572dca111144a5196a6b1 + project: -dev type: dev deploy: - - cdf_data_pipeline_asset_valhall + - cdf_auth_readwrite_all + - cdf_apm_base + - cdf_oid_example_data + - cdf_infield_common + - cdf_infield_location dev: project: -dev type: dev From 8cfd2da8519d611ec887c0677e085054a0ccf74a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 8 Dec 2023 15:44:03 +0100 Subject: [PATCH 20/90] lint --- cognite_toolkit/cdf_tk/load.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 177efa715..2ed244220 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -26,6 +26,7 @@ from typing import Any, Generic, Literal, TypeVar, Union, final import pandas as pd +import yaml from cognite.client import CogniteClient from cognite.client.data_classes import ( DataSet, @@ -834,7 +835,7 @@ def load_resource(self, filepath: Path, dry_run: bool) -> ExtractionPipeline: return ExtractionPipeline.load(resource) - def create(self, items: Sequence[T_Resource], drop: bool, filepath: Path) -> T_ResourceList | None: + def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path) -> ExtractionPipelineList: extractionPipelineList = None try: @@ -878,7 +879,7 @@ def create(self, items: Sequence[T_Resource], drop: bool, filepath: Path) -> T_R { "externalId": resource.get("externalId"), "description": resource.get("description"), - "config": json.dumps(resource.get("config", ""), indent=4), + "config": yaml.dump(resource.get("config", ""), indent=4), } ) try: From bd9c736ac35b868862b565104fce776d26ed4ebd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 8 Dec 2023 15:48:42 +0100 Subject: [PATCH 21/90] tests regen --- .../cdf_data_pipeline_asset_valhall.yaml | 132 ++++++++++++++++++ .../cdf_infield_common.yaml | 2 +- .../cdf_data_pipeline_asset_valhall.yaml | 3 + 3 files changed, 136 insertions(+), 1 deletion(-) create mode 100644 tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml create mode 100644 tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml diff --git a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml new file mode 100644 index 
000000000..d709e3cc8 --- /dev/null +++ b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml @@ -0,0 +1,132 @@ +DataSet: +- description: Asset data for oid + externalId: ds_asset_oid + metadata: + consoleSource: '{"names": ["workmate"]}' + rawTables: '[{"databaseName": "asset_oid_workmate", "tableName": "assets"}]' + transformations: '[{"externalId": "tr_asset_oid_workmate_asset_hierarchy", "type": + "jetfire"}]' + name: asset:oid +Group: +- capabilities: + - rawAcl: + actions: + - READ + - WRITE + scope: + tableScope: + dbsToTables: + asset_oid_workmate: + tables: [] + - extractionConfigsAcl: + actions: + - READ + scope: + extractionPipelineScope: + ids: + - 1 + metadata: + module_version: '1' + origin: cdf-project-templates + name: gp_asset_oid_extractor + sourceId: +- capabilities: + - transformationsAcl: + actions: + - READ + - WRITE + scope: + all: {} + - sessionsAcl: + actions: + - LIST + - CREATE + - DELETE + scope: + all: {} + metadata: + module_version: '1' + origin: cdf-project-templates + name: gp_asset_oid_processing + sourceId: +- capabilities: + - rawAcl: + actions: + - READ + - WRITE + scope: + tableScope: + dbsToTables: + asset_oid_workmate: + tables: [] + - assetsAcl: + actions: + - READ + - WRITE + scope: + datasetScope: + ids: + - 42 + metadata: + module_version: '1' + origin: cdf-project-templates + name: gp_asset_oid_processing + sourceId: +- capabilities: + - assetsAcl: + actions: + - READ + scope: + datasetScope: + ids: + - 42 + metadata: + module_version: '1' + origin: cdf-project-templates + name: gp_asset_oid_read + sourceId: +Transformation: +- conflictMode: upsert + dataSetId: 999 + destination: + type: asset_hierarchy + destinationOidcCredentials: + audience: ${IDP_AUDIENCE} + cdfProjectName: ${CDF_PROJECT} + clientId: ${IDP_CLIENT_ID} + clientSecret: ${IDP_CLIENT_SECRET} + scopes: ${IDP_SCOPES} + tokenUri: ${IDP_TOKEN_URL} + externalId: tr_asset_oid_workmate_asset_hierarchy + ignoreNullFields: true + isPublic: true + name: asset:oid:workmate:asset_hierarchy + ownerIsCurrentUser: true + query: "--\n-- Create Asset Hierarchy using Transformation\n--\n-- Input data from\ + \ RAW DB table (using example data)\n--\n-- Root node has parentExternal id =\ + \ ''\n-- Transformation is connected to asset data set\n-- All metadata expect\ + \ selected fileds are added to metadata\n--\nSELECT \n sourceDb || ':' || tag\ + \ as externalId,\n if(parentTag is null, \n '', \n sourceDb\ + \ || ':' ||parentTag) as parentExternalId,\n tag \ + \ as name,\n sourceDb as source,\n description,\n dataset_id('ds_asset_oid')\ + \ as dataSetId,\n to_metadata_except(\n array(\"sourceDb\", \"parentTag\"\ + , \"description\"), *) \n as metadata\nFROM \n\ + \ `asset_oid_workmate`.`assets`\n" + schedule: + externalId: tr_asset_oid_workmate_asset_hierarchy + interval: 0 * * * * + isPaused: true + sourceOidcCredentials: + audience: ${IDP_AUDIENCE} + cdfProjectName: ${CDF_PROJECT} + clientId: ${IDP_CLIENT_ID} + clientSecret: ${IDP_CLIENT_SECRET} + scopes: ${IDP_SCOPES} + tokenUri: ${IDP_TOKEN_URL} +TransformationSchedule: +- externalId: tr_asset_oid_workmate_asset_hierarchy + interval: 0 * * * * + isPaused: true +deleted: + Transformation: + - externalId: tr_asset_oid_workmate_asset_hierarchy diff --git a/tests/test_approval_modules_snapshots/cdf_infield_common.yaml b/tests/test_approval_modules_snapshots/cdf_infield_common.yaml index 8bb21cd38..5a2c0557f 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_common.yaml +++ 
b/tests/test_approval_modules_snapshots/cdf_infield_common.yaml @@ -19,7 +19,7 @@ Group: metadata: origin: cdf-project-templates name: applications-configuration - sourceId: + sourceId: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e Space: - description: Space for Infield App Data name: cognite_app_data diff --git a/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml new file mode 100644 index 000000000..d04057a23 --- /dev/null +++ b/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml @@ -0,0 +1,3 @@ +deleted: + Transformation: + - externalId: tr_asset_oid_workmate_asset_hierarchy From 613a6887f23d62109cf0735d0496b02ec7d714d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 8 Dec 2023 15:52:49 +0100 Subject: [PATCH 22/90] tests regen --- tests/test_approval_modules_snapshots/cdf_infield_common.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_approval_modules_snapshots/cdf_infield_common.yaml b/tests/test_approval_modules_snapshots/cdf_infield_common.yaml index 5a2c0557f..8bb21cd38 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_common.yaml +++ b/tests/test_approval_modules_snapshots/cdf_infield_common.yaml @@ -19,7 +19,7 @@ Group: metadata: origin: cdf-project-templates name: applications-configuration - sourceId: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e + sourceId: Space: - description: Space for Infield App Data name: cognite_app_data From a42d9cc54737711c6b429ecfe1754270df751756 Mon Sep 17 00:00:00 2001 From: Greger Teigre Wedel Date: Mon, 11 Dec 2023 14:27:17 +0100 Subject: [PATCH 23/90] Package upgrades (#206) * Package upgrades --- .github/workflows/build.yml | 6 +- .github/workflows/demo.yml | 2 +- .github/workflows/release.yaml | 8 +- .pre-commit-config.yaml | 2 +- poetry.lock | 262 ++++++++++++++++----------------- pyproject.toml | 6 +- 6 files changed, 143 insertions(+), 143 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index b58616bb9..7a8b7f417 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - uses: snok/install-poetry@v1 @@ -28,7 +28,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - uses: snok/install-poetry@v1 @@ -52,7 +52,7 @@ jobs: name: Dry Run Demo steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - uses: snok/install-poetry@v1 diff --git a/.github/workflows/demo.yml b/.github/workflows/demo.yml index 27f448e48..ff967fd5e 100644 --- a/.github/workflows/demo.yml +++ b/.github/workflows/demo.yml @@ -18,7 +18,7 @@ jobs: name: Loadmaster steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - uses: snok/install-poetry@v1 diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 69396c811..92ca49445 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ 
-11,7 +11,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - uses: snok/install-poetry@v1 @@ -28,7 +28,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - uses: snok/install-poetry@v1 @@ -48,7 +48,7 @@ jobs: name: Test build templates steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: 3.11 - uses: snok/install-poetry@v1 @@ -73,7 +73,7 @@ jobs: needs: [lint, test, build] steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - uses: snok/install-poetry@v1 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4369b6dd5..49c4ee6cf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,7 @@ repos: - --fixable=E,W,F,I,T,RUF,TID,UP - --target-version=py39 - --exclude=cognite/client/_proto,cognite/client/_proto_legacy - rev: v0.1.5 + rev: v0.1.7 - repo: https://github.com/psf/black rev: 23.11.0 diff --git a/poetry.lock b/poetry.lock index d21def4d1..5b609ecc4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -212,13 +212,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cognite-sdk" -version = "7.3.3" +version = "7.5.4" description = "Cognite Python SDK" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "cognite_sdk-7.3.3-py3-none-any.whl", hash = "sha256:3d7ff4d01d3bd777aa26b77dab4a8486c6522796e9756099f65d81c3956499f9"}, - {file = "cognite_sdk-7.3.3.tar.gz", hash = "sha256:1a8eaca868019bcd2b3c792331e88f3e5d017ce3197115c3b85a44fa71f9c2d5"}, + {file = "cognite_sdk-7.5.4-py3-none-any.whl", hash = "sha256:7c9f87ff81565e284630c9b6b7c8b3c642a98221f89a9b4894e53cf7bb5f9ca8"}, + {file = "cognite_sdk-7.5.4.tar.gz", hash = "sha256:489a121278c68c27993f4e610577e52d93f1028a30ce3e9c05f5c9f89568b196"}, ] [package.dependencies] @@ -254,34 +254,34 @@ files = [ [[package]] name = "cryptography" -version = "41.0.5" +version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, - {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, - {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, - {file = "cryptography-41.0.5.tar.gz", hash = 
"sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"}, + {file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"}, + {file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"}, + {file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"}, + {file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"}, + {file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"}, + {file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"}, + {file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"}, + {file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"}, + {file = "cryptography-41.0.7.tar.gz", hash = 
"sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"}, ] [package.dependencies] @@ -391,13 +391,13 @@ files = [ [[package]] name = "identify" -version = "2.5.32" +version = "2.5.33" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.32-py2.py3-none-any.whl", hash = "sha256:0b7656ef6cba81664b783352c73f8c24b39cf82f926f78f4550eda928e5e0545"}, - {file = "identify-2.5.32.tar.gz", hash = "sha256:5d9979348ec1a21c768ae07e0a652924538e8bce67313a73cb0f681cf08ba407"}, + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, ] [package.extras] @@ -405,31 +405,31 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "7.0.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, + {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] @@ -559,13 +559,13 @@ files = [ [[package]] name = "msal" -version = "1.25.0" -description = "The Microsoft Authentication Library (MSAL) for Python library" +version = "1.26.0" +description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." 
optional = false python-versions = ">=2.7" files = [ - {file = "msal-1.25.0-py2.py3-none-any.whl", hash = "sha256:386df621becb506bc315a713ec3d4d5b5d6163116955c7dde23622f156b81af6"}, - {file = "msal-1.25.0.tar.gz", hash = "sha256:f44329fdb59f4f044c779164a34474b8a44ad9e4940afbc4c3a3a2bbe90324d9"}, + {file = "msal-1.26.0-py2.py3-none-any.whl", hash = "sha256:be77ba6a8f49c9ff598bbcdc5dfcf1c9842f3044300109af738e8c3e371065b5"}, + {file = "msal-1.26.0.tar.gz", hash = "sha256:224756079fe338be838737682b49f8ebc20a87c1c5eeaf590daae4532b83de15"}, ] [package.dependencies] @@ -578,38 +578,38 @@ broker = ["pymsalruntime (>=0.13.2,<0.14)"] [[package]] name = "mypy" -version = "1.7.0" +version = "1.7.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5da84d7bf257fd8f66b4f759a904fd2c5a765f70d8b52dde62b521972a0a2357"}, - {file = "mypy-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a3637c03f4025f6405737570d6cbfa4f1400eb3c649317634d273687a09ffc2f"}, - {file = "mypy-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b633f188fc5ae1b6edca39dae566974d7ef4e9aaaae00bc36efe1f855e5173ac"}, - {file = "mypy-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed9a3997b90c6f891138e3f83fb8f475c74db4ccaa942a1c7bf99e83a989a1"}, - {file = "mypy-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:1fe46e96ae319df21359c8db77e1aecac8e5949da4773c0274c0ef3d8d1268a9"}, - {file = "mypy-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:df67fbeb666ee8828f675fee724cc2cbd2e4828cc3df56703e02fe6a421b7401"}, - {file = "mypy-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a79cdc12a02eb526d808a32a934c6fe6df07b05f3573d210e41808020aed8b5d"}, - {file = "mypy-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f65f385a6f43211effe8c682e8ec3f55d79391f70a201575def73d08db68ead1"}, - {file = "mypy-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e81ffd120ee24959b449b647c4b2fbfcf8acf3465e082b8d58fd6c4c2b27e46"}, - {file = "mypy-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:f29386804c3577c83d76520abf18cfcd7d68264c7e431c5907d250ab502658ee"}, - {file = "mypy-1.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:87c076c174e2c7ef8ab416c4e252d94c08cd4980a10967754f91571070bf5fbe"}, - {file = "mypy-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6cb8d5f6d0fcd9e708bb190b224089e45902cacef6f6915481806b0c77f7786d"}, - {file = "mypy-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93e76c2256aa50d9c82a88e2f569232e9862c9982095f6d54e13509f01222fc"}, - {file = "mypy-1.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cddee95dea7990e2215576fae95f6b78a8c12f4c089d7e4367564704e99118d3"}, - {file = "mypy-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d01921dbd691c4061a3e2ecdbfbfad029410c5c2b1ee88946bf45c62c6c91210"}, - {file = "mypy-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:185cff9b9a7fec1f9f7d8352dff8a4c713b2e3eea9c6c4b5ff7f0edf46b91e41"}, - {file = "mypy-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7b1e399c47b18feb6f8ad4a3eef3813e28c1e871ea7d4ea5d444b2ac03c418"}, - {file = "mypy-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc9fe455ad58a20ec68599139ed1113b21f977b536a91b42bef3ffed5cce7391"}, - {file = "mypy-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:d0fa29919d2e720c8dbaf07d5578f93d7b313c3e9954c8ec05b6d83da592e5d9"}, - {file = "mypy-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b53655a295c1ed1af9e96b462a736bf083adba7b314ae775563e3fb4e6795f5"}, - {file = "mypy-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1b06b4b109e342f7dccc9efda965fc3970a604db70f8560ddfdee7ef19afb05"}, - {file = "mypy-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bf7a2f0a6907f231d5e41adba1a82d7d88cf1f61a70335889412dec99feeb0f8"}, - {file = "mypy-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551d4a0cdcbd1d2cccdcc7cb516bb4ae888794929f5b040bb51aae1846062901"}, - {file = "mypy-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55d28d7963bef00c330cb6461db80b0b72afe2f3c4e2963c99517cf06454e665"}, - {file = "mypy-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:870bd1ffc8a5862e593185a4c169804f2744112b4a7c55b93eb50f48e7a77010"}, - {file = "mypy-1.7.0-py3-none-any.whl", hash = "sha256:96650d9a4c651bc2a4991cf46f100973f656d69edc7faf91844e87fe627f7e96"}, - {file = "mypy-1.7.0.tar.gz", hash = "sha256:1e280b5697202efa698372d2f39e9a6713a0395a756b1c6bd48995f8d72690dc"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, + {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, + {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, + {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, + {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, + {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, + {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, + {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, + {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, + {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, + {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, + {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, + {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, + {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, + {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, + {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, + {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, + {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, ] [package.dependencies] @@ -636,27 +636,27 @@ files = [ [[package]] name = "nh3" -version = "0.2.14" -description = "Ammonia HTML sanitizer Python binding" +version = "0.2.15" +description = "Python bindings to the ammonia HTML sanitization library." optional = false python-versions = "*" files = [ - {file = "nh3-0.2.14-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a"}, - {file = "nh3-0.2.14-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525"}, - {file = "nh3-0.2.14-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6"}, - {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4"}, - {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5"}, - {file = 
"nh3-0.2.14-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d"}, - {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6"}, - {file = "nh3-0.2.14-cp37-abi3-win32.whl", hash = "sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873"}, - {file = "nh3-0.2.14-cp37-abi3-win_amd64.whl", hash = "sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e"}, - {file = "nh3-0.2.14.tar.gz", hash = "sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4"}, + {file = "nh3-0.2.15-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0"}, + {file = "nh3-0.2.15-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7"}, + {file = "nh3-0.2.15-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d"}, + {file = "nh3-0.2.15-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5"}, + {file = "nh3-0.2.15-cp37-abi3-win32.whl", hash = "sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601"}, + {file = "nh3-0.2.15-cp37-abi3-win_amd64.whl", hash = "sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e"}, + {file = "nh3-0.2.15.tar.gz", hash = "sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3"}, ] [[package]] @@ -747,36 +747,36 @@ files = [ [[package]] name = "pandas" -version = "2.1.3" +version = "2.1.4" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acf08a73b5022b479c1be155d4988b72f3020f308f7a87c527702c5f8966d34f"}, - {file = "pandas-2.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:3cc4469ff0cf9aa3a005870cb49ab8969942b7156e0a46cc3f5abd6b11051dfb"}, - {file = "pandas-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35172bff95f598cc5866c047f43c7f4df2c893acd8e10e6653a4b792ed7f19bb"}, - {file = "pandas-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59dfe0e65a2f3988e940224e2a70932edc964df79f3356e5f2997c7d63e758b4"}, - {file = "pandas-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0296a66200dee556850d99b24c54c7dfa53a3264b1ca6f440e42bad424caea03"}, - {file = "pandas-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:465571472267a2d6e00657900afadbe6097c8e1dc43746917db4dfc862e8863e"}, - {file = "pandas-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04d4c58e1f112a74689da707be31cf689db086949c71828ef5da86727cfe3f82"}, - {file = "pandas-2.1.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7fa2ad4ff196768ae63a33f8062e6838efed3a319cf938fdf8b95e956c813042"}, - {file = "pandas-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4441ac94a2a2613e3982e502ccec3bdedefe871e8cea54b8775992485c5660ef"}, - {file = "pandas-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5ded6ff28abbf0ea7689f251754d3789e1edb0c4d0d91028f0b980598418a58"}, - {file = "pandas-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca5680368a5139d4920ae3dc993eb5106d49f814ff24018b64d8850a52c6ed2"}, - {file = "pandas-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:de21e12bf1511190fc1e9ebc067f14ca09fccfb189a813b38d63211d54832f5f"}, - {file = "pandas-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a5d53c725832e5f1645e7674989f4c106e4b7249c1d57549023ed5462d73b140"}, - {file = "pandas-2.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7cf4cf26042476e39394f1f86868d25b265ff787c9b2f0d367280f11afbdee6d"}, - {file = "pandas-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72c84ec1b1d8e5efcbff5312abe92bfb9d5b558f11e0cf077f5496c4f4a3c99e"}, - {file = "pandas-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f539e113739a3e0cc15176bf1231a553db0239bfa47a2c870283fd93ba4f683"}, - {file = "pandas-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc77309da3b55732059e484a1efc0897f6149183c522390772d3561f9bf96c00"}, - {file = "pandas-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:08637041279b8981a062899da0ef47828df52a1838204d2b3761fbd3e9fcb549"}, - {file = "pandas-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b99c4e51ef2ed98f69099c72c75ec904dd610eb41a32847c4fcbc1a975f2d2b8"}, - {file = "pandas-2.1.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f7ea8ae8004de0381a2376662c0505bb0a4f679f4c61fbfd122aa3d1b0e5f09d"}, - {file = "pandas-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcd76d67ca2d48f56e2db45833cf9d58f548f97f61eecd3fdc74268417632b8a"}, - {file = "pandas-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1329dbe93a880a3d7893149979caa82d6ba64a25e471682637f846d9dbc10dd2"}, - {file = "pandas-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:321ecdb117bf0f16c339cc6d5c9a06063854f12d4d9bc422a84bb2ed3207380a"}, - {file = "pandas-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:11a771450f36cebf2a4c9dbd3a19dfa8c46c4b905a3ea09dc8e556626060fe71"}, - {file = "pandas-2.1.3.tar.gz", hash = "sha256:22929f84bca106921917eb73c1521317ddd0a4c71b395bcf767a106e3494209f"}, + {file = 
"pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, + {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, + {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, + {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, + {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, + {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, + {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, + {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, + {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, + {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, + {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, + {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, + {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, ] [package.dependencies] @@ -842,13 +842,13 @@ testing = ["pytest", "pytest-cov"] [[package]] name = "platformdirs" -version = "4.0.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -883,13 +883,13 @@ files = [ [[package]] name = "pre-commit" -version = "3.5.0" +version = "3.6.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, - {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, + {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, + {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, ] [package.dependencies] @@ -1025,13 +1025,13 @@ pytest = ">=5.0" [[package]] name = "pytest-icdiff" -version = "0.8" +version = "0.9" description = "use icdiff for better error messages in pytest assertions" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-icdiff-0.8.tar.gz", hash = "sha256:4f493ae5ee63c8e90e9f96d4b0b2968b19634dfed8a6e3c9848fcd0d6cadcf7b"}, - {file = "pytest_icdiff-0.8-py3-none-any.whl", hash = "sha256:8fac8667d7042270c23019580b4b5dfd81e1c3e5a9bc9d5df6ac4a49788d42f2"}, + {file = "pytest-icdiff-0.9.tar.gz", hash = "sha256:13aede616202e57fcc882568b64589002ef85438046f012ac30a8d959dac8b75"}, + {file = "pytest_icdiff-0.9-py3-none-any.whl", hash = "sha256:efee0da3bd1b24ef2d923751c5c547fbb8df0a46795553fba08ef57c3ca03d82"}, ] [package.dependencies] @@ -1495,13 +1495,13 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. 
[[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] @@ -1533,13 +1533,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.24.7" +version = "20.25.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.7-py3-none-any.whl", hash = "sha256:a18b3fd0314ca59a2e9f4b556819ed07183b3e9a3702ecfe213f593d44f7b3fd"}, - {file = "virtualenv-20.24.7.tar.gz", hash = "sha256:69050ffb42419c91f6c1284a7b24e0475d793447e35929b488bf6a0aade39353"}, + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, ] [package.dependencies] @@ -1569,4 +1569,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "c871feaa79f05bc82a6873d4dd6f06baad78a712eb38e7afc9dc400260a8535a" +content-hash = "396847f137a1189d4b19d6867fee9bb90bf984866586848499777ade8450b28e" diff --git a/pyproject.toml b/pyproject.toml index 999a108b7..00ebbc954 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ include = ["cognite_toolkit/*", "cognite_toolkit/**/*"] [tool.poetry.dependencies] python = ">=3.9,<3.12" python-dotenv = "^1.0.0" -cognite-sdk = {version = "^7.3.3", extras = ["pandas"]} +cognite-sdk = {version = "^7.5.4", extras = ["pandas"]} pandas = "^2.1.1" pyyaml = "^6.0.1" dacite = "^1.8.1" @@ -22,8 +22,8 @@ pytest-icdiff = "*" # Used for better diffs in pytest [tool.poetry.group.dev.dependencies] -mypy = "^1.6.1" -pre-commit = "^3.5.0" +mypy = "^1.7.1" +pre-commit = "^3.6.0" pytest = "^7.4.2" pep8-naming = "^0.13.3" pytest-regressions = "^2.4.2" From 0e26c01940d3de61834f7c9da42f22666c5d2fa3 Mon Sep 17 00:00:00 2001 From: Anders Albert <60234212+doctrino@users.noreply.github.com> Date: Mon, 11 Dec 2023 14:53:33 +0100 Subject: [PATCH 24/90] One Config (#200) * refactor: Moved modules around * refactor: update init command to new folder structure * feat: Created generate_config * refactor: Generate config * refactor: Work towards one config * refactor: Towards one module * fix: Copy over single file * fix: iterate modules function * tests: updated test * build: Added config yaml * refactor: populate config * tests: Added test to check config.yaml is up to date * build: ruamel.yaml * refactor: preserve comments * refactor: update test data * refactor: print out on init * fix: no existing config * style: fix typo * tests: updated tests * refactor: better error message * refactor: update auth command * fix: path * Update README * Move cdf_apm_base into separate folder * refactor: review feedback graceful exit * refactor: rename to cognite and
custom modules * refactor: Added example module * refactor: Finish renaming and adjust example * fix: place variables correctly in hierarchy * refactor: indent comments * fix: indent of comments * docs: changelog and documentation * poetry update --------- Co-authored-by: Greger Wedel --- .gitignore | 3 +- CHANGELOG.cdf-tk.md | 3 + CHANGELOG.templates.md | 6 +- cognite_toolkit/cdf.py | 66 +- cognite_toolkit/cdf_tk/templates.py | 681 ++++++++++++++---- cognite_toolkit/cognite_modules/README.md | 26 + .../common/cdf_auth_readwrite_all/README.md | 0 .../auth/readonly.all.group.yaml | 0 .../auth/readwrite.all.group.yaml | 0 .../default.config.yaml | 0 .../core}/cdf_apm_base/README.md | 0 .../data_models/1.Activity.container.yaml | 0 .../data_models/1.Activity.view.yaml | 0 .../data_models/2.Operation.container.yaml | 0 .../data_models/2.Operation.view.yaml | 0 .../data_models/3.Notification.container.yaml | 0 .../data_models/3.Notification.view.yaml | 0 .../data_models/4.ApmConfig.container.yaml | 0 .../data_models/4.ApmConfig.view.yaml | 0 .../data_models/APM_Config.space.yaml | 0 .../data_models/apm.datamodel.yaml | 0 .../data_models/apm_config.datamodel.yaml | 0 .../data_models/apm_data_model.space.yaml | 0 .../core}/cdf_apm_base/default.config.yaml | 0 .../{ => cognite_modules}/default.config.yaml | 0 .../default.packages.yaml | 0 .../{ => cognite_modules}/examples/README.md | 0 .../cdf_apm_simple_data_model/README.md | 0 .../data_models/1.Asset.container.yaml | 0 .../data_models/2.WorkOrder.container.yaml | 0 .../data_models/3.WorkItem.container.yaml | 0 .../data_models/4.Asset.view.yaml | 0 .../data_models/5.WorkItem.view.yaml | 0 .../data_models/6.WorkOrder.view.yaml | 0 .../data_models/7.apm_simple.datamodel.yaml | 0 .../data_models/apm_simple.space.yaml | 0 .../default.config.yaml | 0 ...orkmate_apm_simple_load_asset2children.sql | 0 ...rkmate_apm_simple_load_asset2children.yaml | 0 ...et_oid_workmate_apm_simple_load_assets.sql | 0 ...t_oid_workmate_apm_simple_load_assets.yaml | 0 ...d_pi_apm_simple_load_timeseries2assets.sql | 0 ..._pi_apm_simple_load_timeseries2assets.yaml | 0 ...oid_workmate_apm_simple_load_workitems.sql | 0 ...id_workmate_apm_simple_load_workitems.yaml | 0 ...kmate_apm_simple_load_workitems2assets.sql | 0 ...mate_apm_simple_load_workitems2assets.yaml | 0 ...e_apm_simple_load_workitems2workorders.sql | 0 ..._apm_simple_load_workitems2workorders.yaml | 0 ...id_workmate_apm_simple_load_workorders.sql | 0 ...d_workmate_apm_simple_load_workorders.yaml | 0 ...mate_apm_simple_load_workorders2assets.sql | 0 ...ate_apm_simple_load_workorders2assets.yaml | 0 .../examples/cdf_oid_example_data/LICENSE.md | 0 .../Open Industrial Data - Terms of Use.pdf | Bin .../examples/cdf_oid_example_data/README.md | 0 .../data_sets/data_sets.yaml | 0 .../cdf_oid_example_data/default.config.yaml | 0 .../files/PH-25578-P-4110006-001.pdf | Bin .../files/PH-25578-P-4110010-001.pdf | Bin .../files/PH-25578-P-4110119-001.pdf | Bin .../files/PH-ME-P-0003-001.pdf | Bin .../files/PH-ME-P-0004-001.pdf | Bin .../files/PH-ME-P-0151-001.pdf | Bin .../files/PH-ME-P-0152-001.pdf | Bin .../files/PH-ME-P-0153-001.pdf | Bin .../files/PH-ME-P-0156-001.pdf | Bin .../files/PH-ME-P-0156-002.pdf | Bin .../files/PH-ME-P-0160-001.pdf | Bin .../cdf_oid_example_data/files/files.yaml | 0 .../raw/asset2children.csv | 0 .../raw/asset2children.yaml | 0 .../cdf_oid_example_data/raw/assets.csv | 0 .../cdf_oid_example_data/raw/assets.yaml | 0 .../raw/files_metadata.csv | 0 .../raw/files_metadata.yaml | 0 
.../raw/timeseries2assets.csv | 0 .../raw/timeseries2assets.yaml | 0 .../raw/workitem2assets.csv | 0 .../raw/workitem2assets.yaml | 0 .../cdf_oid_example_data/raw/workitems.csv | 0 .../cdf_oid_example_data/raw/workitems.yaml | 0 .../raw/workorder2assets.csv | 0 .../raw/workorder2assets.yaml | 0 .../raw/workorder2items.csv | 0 .../raw/workorder2items.yaml | 0 .../cdf_oid_example_data/raw/workorders.csv | 0 .../cdf_oid_example_data/raw/workorders.yaml | 0 .../timeseries/timeseries.yaml | 0 .../tr_asset_oid_workmate_asset_hierarchy.sql | 0 ...tr_asset_oid_workmate_asset_hierarchy.yaml | 0 .../example_pump_asset_hierarchy/README.md | 0 .../data_sets/data_sets.yaml | 0 .../default.config.yaml | 0 .../raw/collections_pump.csv | 0 .../raw/collections_pump.yaml | 0 ..._asset_hierarchy-load-collections_pump.sql | 0 ...asset_hierarchy-load-collections_pump.yaml | 0 .../experimental/README.md | 0 .../cdf_asset_source_model/README.md | 0 .../data_models/1.InstanceSpace.space.yaml | 0 .../data_models/2.ModelSpace.space.yaml | 0 .../data_models/ExtendedAsset.container.yaml | 0 .../data_models/ExtendedAsset.view.yaml | 0 .../ExtendedSourceData.datamodel.yaml | 0 .../default.config.yaml | 0 ...asset_hierarchy_cdf_asset_source_model.sql | 0 ...sset_hierarchy_cdf_asset_source_model.yaml | 0 .../example_pump_data_model/README.md | 0 .../data_models/1.spaces.space.yaml | 0 .../data_models/2.Pump.container.yaml | 0 .../data_models/3.LiftStation.view.yaml | 0 .../data_models/4.Pump.view.yaml | 0 .../5.PumpLiftStations.datamodel.yaml | 0 .../default.config.yaml | 0 ...odel-populate-lift_station_pumps_edges.sql | 0 ...del-populate-lift_station_pumps_edges.yaml | 0 .../pump_model-populate-pump_container.sql | 0 .../pump_model-populate-pump_container.yaml | 0 .../infield}/cdf_infield_common/README.md | 0 .../auth/applications-configuration.yaml | 0 .../data_models/infieldAppData.space.yaml | 0 .../cdf_infield_common/default.config.yaml | 0 .../infield}/cdf_infield_location/README.md | 0 .../infield_checklist_admin_role.group.yaml | 0 .../auth/infield_normal_role.group.yaml | 0 .../infield_template_admin_role.group.yaml | 0 .../auth/infield_viewer_role.group.yaml | 0 .../infieldLocationAppData.space.yaml | 0 .../infieldLocationSourceData.space.yaml | 0 .../infield_apm_app_config.node.yaml | 0 .../data_sets/location_app_data_set.yaml | 0 .../location_source_data_set.yaml.tmpl | 0 .../cdf_infield_location/default.config.yaml | 0 ...nc_asset_parents_from_hierarchy_to_apm.sql | 0 ...c_asset_parents_from_hierarchy_to_apm.yaml | 0 ...ield_sync_assets_from_hierarchy_to_apm.sql | 0 ...eld_sync_assets_from_hierarchy_to_apm.yaml | 0 ...ield_sync_workorders_to_apm_activities.sql | 0 ...eld_sync_workorders_to_apm_activities.yaml | 0 cognite_toolkit/config.yaml | 137 +++- cognite_toolkit/custom_modules/README.md | 9 + .../my_example_module/README.md | 1 + .../data_sets/data_sets.yaml | 4 + .../my_example_module/default.config.yaml | 1 + .../timeseries/timeseries.yaml | 18 + .../{local.yaml => environments.yaml} | 8 +- cognite_toolkit/local_modules/README.md | 7 - cognite_toolkit/modules/README.md | 10 - cognite_toolkit/packages.yaml | 10 - config.yaml | 4 - poetry.lock | 63 +- pyproject.toml | 2 +- tests/test_approval_modules.py | 68 +- tests/{test_versions.py => test_build.py} | 11 + .../a_module/default.config.yaml | 2 + .../another_module/default.config.yaml | 5 + .../cognite_modules/default.config.yaml | 2 + .../child_module/default.config.yaml | 1 + tests/test_cdf_tk/test_templates.py | 87 +++ 160 files changed, 1001 
insertions(+), 234 deletions(-) create mode 100644 cognite_toolkit/cognite_modules/README.md rename cognite_toolkit/{ => cognite_modules}/common/cdf_auth_readwrite_all/README.md (100%) rename cognite_toolkit/{ => cognite_modules}/common/cdf_auth_readwrite_all/auth/readonly.all.group.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/common/cdf_auth_readwrite_all/auth/readwrite.all.group.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/common/cdf_auth_readwrite_all/default.config.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/README.md (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/1.Activity.container.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/1.Activity.view.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/2.Operation.container.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/2.Operation.view.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/3.Notification.container.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/3.Notification.view.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/4.ApmConfig.container.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/4.ApmConfig.view.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/APM_Config.space.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/apm.datamodel.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/apm_config.datamodel.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/data_models/apm_data_model.space.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/core}/cdf_apm_base/default.config.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/default.config.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/default.packages.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/README.md (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/README.md (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/data_models/1.Asset.container.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/data_models/2.WorkOrder.container.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/data_models/3.WorkItem.container.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/data_models/4.Asset.view.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/data_models/5.WorkItem.view.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/data_models/6.WorkOrder.view.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/data_models/7.apm_simple.datamodel.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/data_models/apm_simple.space.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/default.config.yaml (100%) rename cognite_toolkit/{ => 
cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/LICENSE.md (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/Open Industrial Data - Terms of Use.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/README.md (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/data_sets/data_sets.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/default.config.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-25578-P-4110006-001.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-25578-P-4110010-001.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-25578-P-4110119-001.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-ME-P-0003-001.pdf (100%) rename 
cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-ME-P-0004-001.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-ME-P-0151-001.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-ME-P-0152-001.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-ME-P-0153-001.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-ME-P-0156-001.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-ME-P-0156-002.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/PH-ME-P-0160-001.pdf (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/files/files.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/asset2children.csv (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/asset2children.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/assets.csv (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/assets.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/files_metadata.csv (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/files_metadata.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/timeseries2assets.csv (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/timeseries2assets.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/workitem2assets.csv (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/workitem2assets.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/workitems.csv (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/workitems.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/workorder2assets.csv (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/workorder2assets.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/workorder2items.csv (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/workorder2items.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/workorders.csv (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/raw/workorders.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/timeseries/timeseries.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/example_pump_asset_hierarchy/README.md (100%) rename cognite_toolkit/{ => cognite_modules}/examples/example_pump_asset_hierarchy/data_sets/data_sets.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/example_pump_asset_hierarchy/default.config.yaml (100%) rename cognite_toolkit/{ => 
cognite_modules}/examples/example_pump_asset_hierarchy/raw/collections_pump.csv (100%) rename cognite_toolkit/{ => cognite_modules}/examples/example_pump_asset_hierarchy/raw/collections_pump.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/README.md (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/cdf_asset_source_model/README.md (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/cdf_asset_source_model/data_models/1.InstanceSpace.space.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/cdf_asset_source_model/data_models/2.ModelSpace.space.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/cdf_asset_source_model/data_models/ExtendedAsset.container.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/cdf_asset_source_model/data_models/ExtendedAsset.view.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/cdf_asset_source_model/data_models/ExtendedSourceData.datamodel.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/cdf_asset_source_model/default.config.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.sql (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/README.md (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/data_models/1.spaces.space.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/data_models/2.Pump.container.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/data_models/3.LiftStation.view.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/data_models/4.Pump.view.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/data_models/5.PumpLiftStations.datamodel.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/default.config.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.sql (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.sql (100%) rename cognite_toolkit/{ => cognite_modules}/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_common/README.md (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_common/auth/applications-configuration.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_common/data_models/infieldAppData.space.yaml (100%) rename cognite_toolkit/{modules => 
cognite_modules/infield}/cdf_infield_common/default.config.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/README.md (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/auth/infield_checklist_admin_role.group.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/auth/infield_normal_role.group.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/auth/infield_template_admin_role.group.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/auth/infield_viewer_role.group.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/data_models/infieldLocationAppData.space.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/data_models/infieldLocationSourceData.space.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/data_models/infield_apm_app_config.node.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/data_sets/location_app_data_set.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/data_sets/location_source_data_set.yaml.tmpl (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/default.config.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.sql (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.sql (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.sql (100%) rename cognite_toolkit/{modules => cognite_modules/infield}/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml (100%) create mode 100644 cognite_toolkit/custom_modules/README.md create mode 100644 cognite_toolkit/custom_modules/my_example_module/README.md create mode 100644 cognite_toolkit/custom_modules/my_example_module/data_sets/data_sets.yaml create mode 100644 cognite_toolkit/custom_modules/my_example_module/default.config.yaml create mode 100644 cognite_toolkit/custom_modules/my_example_module/timeseries/timeseries.yaml rename cognite_toolkit/{local.yaml => environments.yaml} (92%) delete mode 100644 cognite_toolkit/local_modules/README.md delete mode 100644 cognite_toolkit/modules/README.md delete mode 100644 cognite_toolkit/packages.yaml delete mode 100644 config.yaml rename tests/{test_versions.py => test_build.py} (83%) create mode 100644 tests/test_cdf_tk/project_configs/cognite_modules/a_module/default.config.yaml create mode 100644 tests/test_cdf_tk/project_configs/cognite_modules/another_module/default.config.yaml create mode 100644 tests/test_cdf_tk/project_configs/cognite_modules/default.config.yaml create mode 100644 
tests/test_cdf_tk/project_configs/cognite_modules/parent_module/child_module/default.config.yaml create mode 100644 tests/test_cdf_tk/test_templates.py diff --git a/.gitignore b/.gitignore index 72eaa3c9a..68120f5ae 100644 --- a/.gitignore +++ b/.gitignore @@ -278,4 +278,5 @@ new_project/ # If you need to update the cognite_toolkit template files for local.yaml and config.yaml, comment below local.yaml config.yaml -demo_project/ \ No newline at end of file +demo_project/ +tests/pytest-project/ diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md index 71515b4fd..59873b868 100644 --- a/CHANGELOG.cdf-tk.md +++ b/CHANGELOG.cdf-tk.md @@ -25,6 +25,9 @@ Changes are grouped as follows: - Require all spaces to be explicitly defined as separate .space.yaml file. - The `data_set_id` for `Transformations` must now be set explicitly in the yaml config file for the `Transformation` under the `data_set_id` key. Note that you also need to explicitly define the `data_set` in its own yaml config file. +- All config files have been merged into a single config file, `config.yaml`. Upon calling `cdf-tk init` the `config.yaml` + is created in the root folder of the project based on the `default.config.yaml` file of each module. + ### Fixed - When running `cdf-tk deploy` with `--dry-run` a `ValueError` was raised if not all datasets were pre-existing. diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index dbcc931a6..44c727114 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -17,7 +17,11 @@ Changes are grouped as follows: ## TBD - 2023-12-TBD ### Added - Explicitly define model `space` in `experimental/cdf_asset_source_model/` and `experimental/example_pump_model/`. - +- The module `my_example_module` has been added to the `custom_modules` folder. +### Changed +- All Cognite modules have been moved into the `cognite_modules` folder, while `local_modules` has been renamed to `custom_modules`. +- Moved `cdf_apm_base` into a separate `core` folder. +- The file `local.yaml` has been renamed to `environments.yaml` to better reflect its purpose.
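The `config.yaml` generation described in the changelog entry above is implemented by the `generate_config` function added to `cognite_toolkit/cdf_tk/templates.py` later in this patch. As a minimal standalone sketch of the merge behaviour (ignoring the comment preservation and change tracking the real function does; `merge_default_configs` is a hypothetical helper name used only for illustration):

```python
from pathlib import Path

import yaml


def merge_default_configs(project_dir: Path) -> dict:
    """Collect every module's default.config.yaml into one nested dict keyed by module path."""
    merged: dict = {}
    for default_file in sorted(project_dir.glob("**/default.config.yaml")):
        parts = default_file.relative_to(project_dir).parent.parts
        target = merged
        for part in parts:  # mirror the folder structure, e.g. cognite_modules -> infield -> cdf_infield_location
            target = target.setdefault(part, {})
        target.update(yaml.safe_load(default_file.read_text()) or {})
    return merged


if __name__ == "__main__":
    # Roughly what `cdf-tk init` now does when it writes config.yaml to the project root.
    config = merge_default_configs(Path("."))
    Path("config.yaml").write_text(yaml.safe_dump(config))
```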
## [0.2.0] - 2023-12-01 ### Changed diff --git a/cognite_toolkit/cdf.py b/cognite_toolkit/cdf.py index e99f9ca12..54b06ecb5 100755 --- a/cognite_toolkit/cdf.py +++ b/cognite_toolkit/cdf.py @@ -24,7 +24,15 @@ AuthLoader, drop_load_resources, ) -from cognite_toolkit.cdf_tk.templates import build_config, read_environ_config +from cognite_toolkit.cdf_tk.templates import ( + COGNITE_MODULES, + CONFIG_FILE, + CUSTOM_MODULES, + ENVIRONMENTS_FILE, + build_config, + generate_config, + read_environ_config, +) from cognite_toolkit.cdf_tk.utils import CDFToolConfig app = typer.Typer(pretty_exceptions_short=False, pretty_exceptions_show_locals=False, pretty_exceptions_enable=False) @@ -150,19 +158,31 @@ def build( ), ] = False, ) -> None: """Build configuration files from the module templates to a local build directory.""" + source_dir = Path(source_dir) - if not Path(source_dir).is_dir() or not (Path(source_dir) / "local.yaml").is_file(): - print(f" [bold red]ERROR:[/] {source_dir} does not exist or no local.yaml file found.") + if not source_dir.is_dir(): + print(f" [bold red]ERROR:[/] {source_dir} does not exist") + exit(1) + environment_file = Path.cwd() / ENVIRONMENTS_FILE + if not environment_file.is_file() and not (environment_file := source_dir / ENVIRONMENTS_FILE).is_file(): + print(f" [bold red]ERROR:[/] {environment_file} does not exist") + exit(1) + config_file = Path.cwd() / CONFIG_FILE + if not config_file.is_file() and not (config_file := source_dir / CONFIG_FILE).is_file(): + print(f" [bold red]ERROR:[/] {config_file} does not exist") exit(1) print( Panel( - f"[bold]Building config files from templates into {build_dir} for environment {build_env} using {source_dir} as sources...[/bold]" + f"[bold]Building config files from templates into {build_dir!s} for environment {build_env} using {source_dir!s} as sources...[/bold]" + f"\n[bold]Environment file:[/] {environment_file.absolute().relative_to(Path.cwd())!s} and [bold]config file:[/] {config_file.absolute().relative_to(Path.cwd())!s}" ) ) build_config( - build_dir=build_dir, + build_dir=Path(build_dir), source_dir=source_dir, + config_file=config_file, + environment_file=environment_file, build_env=build_env, clean=clean, verbose=ctx.obj.verbose, @@ -440,7 +460,7 @@ def auth_verify( "-f", help="Group yaml configuration file to use for group verification", ), - ] = "/common/cdf_auth_readwrite_all/auth/readwrite.all.group.yaml", + ] = f"/{COGNITE_MODULES}/common/cdf_auth_readwrite_all/auth/readwrite.all.group.yaml", update_group: Annotated[ Optional[int], typer.Option( @@ -544,28 +564,20 @@ def main_init( ): """Initialize a new CDF project with templates.""" - files_to_copy = [ - "default.config.yaml", - "default.packages.yaml", - ] + files_to_copy = [] dirs_to_copy = [] if not upgrade: files_to_copy.extend( [ - "config.yaml", - "local.yaml", - "packages.yaml", + "environments.yaml", "README.md", ".gitignore", ".env.tmpl", ] ) - dirs_to_copy.append("local_modules") + dirs_to_copy.append(CUSTOM_MODULES) module_dirs_to_copy = [ - "common", - "modules", - "examples", - "experimental", + COGNITE_MODULES, ] template_dir = resources.files("cognite_toolkit") target_dir = Path.cwd() / f"{init_dir}" @@ -597,7 +609,7 @@ def main_init( print(dirs_to_copy) extract_dir = None if upgrade and git is not None: - zip = f"https://github.com/cognitedata/cdf-project-templates/archive/refs/heads/{git}.zip" + toolkit_github_url = f"https://github.com/cognitedata/cdf-project-templates/archive/refs/heads/{git}.zip" extract_dir = tempfile.mkdtemp(prefix="git.",
suffix=".tmp", dir=Path.cwd()) print(f"Upgrading templates from https://github.com/cognitedata/cdf-project-templates, branch {git}...") print( @@ -605,7 +617,7 @@ ) if not dry_run: try: - zip_path, _ = urllib.request.urlretrieve(zip) + zip_path, _ = urllib.request.urlretrieve(toolkit_github_url) with zipfile.ZipFile(zip_path, "r") as f: f.extractall(extract_dir) except Exception as e: @@ -661,7 +673,19 @@ def main_init( print(f"New project created in {target_dir}.") if upgrade: print(" All default.config.yaml files in the modules have been upgraded.") - print(" Your config.yaml files may need to be updated to override new default variales.") + print(" Your config.yaml files may need to be updated to override new default variables.") + + config_filepath = target_dir / "config.yaml" + if not dry_run: + if clean or not config_filepath.exists(): + config_str, _ = generate_config(target_dir) + config_filepath.write_text(config_str) + print(f"Created config.yaml file in {target_dir}.") + else: + current = config_filepath.read_text() + config_str, difference = generate_config(target_dir, existing_config=current) + config_filepath.write_text(config_str) + print(str(difference)) def _process_include(include: Optional[list[str]], interactive: bool) -> list[str]: diff --git a/cognite_toolkit/cdf_tk/templates.py b/cognite_toolkit/cdf_tk/templates.py index 3bda5e70a..09e2a78fb 100644 --- a/cognite_toolkit/cdf_tk/templates.py +++ b/cognite_toolkit/cdf_tk/templates.py @@ -1,25 +1,43 @@ from __future__ import annotations +import io import itertools import os import re import shutil +from collections import ChainMap, UserList, defaultdict +from collections.abc import Iterator, Mapping, Sequence +from dataclasses import dataclass from pathlib import Path -from typing import Any +from typing import Any, Literal, overload import yaml from rich import print +from ruamel.yaml import YAML, CommentedMap from cognite_toolkit.cdf_tk.load import LOADER_BY_FOLDER_NAME from cognite_toolkit.cdf_tk.utils import LoadWarning, validate_case_raw +# This is the default config located locally in each module. +DEFAULT_CONFIG_FILE = "default.config.yaml" +# The environment file: +ENVIRONMENTS_FILE = "environments.yaml" +# The local config file: +CONFIG_FILE = "config.yaml" +# The default package file: +DEFAULT_PACKAGES_FILE = "default.packages.yaml" +# The package files: +PACKAGES_FILE = "packages.yaml" +COGNITE_MODULES = "cognite_modules" +CUSTOM_MODULES = "custom_modules" + TMPL_DIRS = ["common", "modules", "local_modules", "examples", "experimental"] # Add any other files below that should be included in a build -EXCL_FILES = ["README.md"] -# Which suffixes to exclude when we create indexed files (i.e.
they are bundled with their main config file) -EXCL_INDEX_SUFFIX = ["sql", "csv", "parquet"] +EXCL_FILES = ["README.md", DEFAULT_CONFIG_FILE] +# Which suffixes to exclude when we create indexed files (i.e., they are bundled with their main config file) +EXCL_INDEX_SUFFIX = frozenset([".sql", ".csv", ".parquet"]) # Which suffixes to process for template variable replacement -PROC_TMPL_VARS_SUFFIX = ["yaml", "yml", "sql", "csv", "parquet", "json", "txt", "md", "html", "py"] +PROC_TMPL_VARS_SUFFIX = frozenset([".yaml", ".yml", ".sql", ".csv", ".parquet", ".json", ".txt", ".md", ".html", ".py"]) def read_environ_config( @@ -42,19 +60,21 @@ global_config = read_yaml_files(root_dir, "default.packages.yaml") packages = global_config.get("packages", {}) packages.update(read_yaml_files(root_dir, "packages.yaml").get("packages", {})) - local_config = read_yaml_files(root_dir, "local.yaml") - print(f" Environment is {build_env}, using that section in local.yaml.\n") + environment_config = read_yaml_files(root_dir, ENVIRONMENTS_FILE) + + print(f" Environment is {build_env}, using that section in {ENVIRONMENTS_FILE}.\n") if verbose: print(" [bold green]INFO:[/] Found defined packages:") for name, content in packages.items(): print(f" {name}: {content}") modules = [] - if len(local_config) == 0: + if len(environment_config) == 0: return [] try: - defs = local_config[build_env] + defs = environment_config[build_env] except KeyError: - raise ValueError(f"Environment {build_env} not found in local.yaml") + print(f" [bold red]ERROR:[/] Environment {build_env} not found in {ENVIRONMENTS_FILE}") + exit(1) os.environ["CDF_ENVIRON"] = build_env for k, v in defs.items(): @@ -66,9 +86,10 @@ ) print(f" Environment is {build_env}, continuing (would have stopped for staging and prod)...") else: - raise ValueError( - f"Project name mismatch (CDF_PROJECT) between local.yaml ({v}) and what is defined in environment ({os.environ['CDF_PROJECT']})." + print( + f" [bold red]ERROR:[/] Project name mismatch (CDF_PROJECT) between {ENVIRONMENTS_FILE} ({v}) and what is defined in environment ({os.environ['CDF_PROJECT']})." ) + exit(1) elif k == "type": os.environ["CDF_BUILD_TYPE"] = v elif k == "deploy": @@ -113,6 +134,97 @@ return load_list +def get_selected_modules( + source_module: Path, + environment_file: Path, + build_env: str = "dev", + verbose: bool = False, +) -> list[str]: + print(f" Environment is {build_env}, using that section in {ENVIRONMENTS_FILE!s}.\n") + + modules_by_package = _read_packages(source_module, verbose) + + selected_module_and_packages = _get_modules_and_packages(environment_file, build_env) + + selected_packages = [package for package in selected_module_and_packages if package in modules_by_package] + if verbose: + print(" [bold green]INFO:[/] Selected packages:") + for package in selected_packages: + print(f" {package}") + + selected_modules = [module for module in selected_module_and_packages if module not in modules_by_package] + selected_modules.extend(itertools.chain.from_iterable(modules_by_package[package] for package in selected_packages)) + + if verbose: + print(" [bold green]INFO:[/] Selected modules:") + for module in selected_modules: + print(f" {module}") + if not selected_modules: + print( + f" [bold yellow]WARNING:[/] Found no defined modules in {ENVIRONMENTS_FILE!s}, have you configured the environment ({build_env})?"
+ ) + exit(1) + + available_modules = {module.name for module, _ in iterate_modules(source_module)} + if not (missing_modules := set(selected_modules) - available_modules): + return selected_modules + + print(f" [bold red]ERROR:[/] Modules {missing_modules} not found in {source_module}.") + exit(1) + + +def _get_modules_and_packages(environment_file: Path, build_env: str) -> list[str]: + environment_config = read_yaml_file(environment_file) + environment = environment_config.get(build_env) + if environment is None: + raise ValueError(f"Environment {build_env} not found in {ENVIRONMENTS_FILE!s}") + try: + project_config = environment["project"] + environment_type = environment["type"] + deploy = environment["deploy"] + except KeyError: + print( + f" [bold red]ERROR:[/] Environment {build_env} is missing required fields 'project', 'type', or 'deploy' in {ENVIRONMENTS_FILE!s}" + ) + exit(1) + + os.environ["CDF_ENVIRON"] = build_env + os.environ["CDF_BUILD_TYPE"] = environment_type + if (project_env := os.environ.get("CDF_PROJECT", "")) != project_config: + if build_env == "dev" or build_env == "local" or build_env == "demo": + print( + f" [bold yellow]WARNING:[/] Project name mismatch (CDF_PROJECT) between {ENVIRONMENTS_FILE!s} ({project_config}) and what is defined in environment ({project_env})." + ) + print(f" Environment is {build_env}, continuing (would have stopped for staging and prod)...") + else: + print( + f" [bold red]ERROR:[/] Project name mismatch (CDF_PROJECT) between {ENVIRONMENTS_FILE!s} ({project_config}) and what is defined in environment ({project_env})." + ) + exit(1) + return deploy + + +def _read_packages(source_module: Path, verbose: bool) -> dict[str, Any]: + cdf_modules_by_packages = read_yaml_file(source_module / DEFAULT_PACKAGES_FILE).get("packages", {}) + if (package_path := source_module / PACKAGES_FILE).exists(): + local_modules_by_packages = read_yaml_file(package_path).get("packages", {}) + if overwrites := set(cdf_modules_by_packages.keys()) & set(local_modules_by_packages.keys()): + print( + f" [bold yellow]WARNING:[/] Found packages in {PACKAGES_FILE} that are also defined in {DEFAULT_PACKAGES_FILE}:" + ) + for package in overwrites: + print(f" {package}") + print(f" Using the packages defined in {PACKAGES_FILE}.") + modules_by_package = {**cdf_modules_by_packages, **local_modules_by_packages} + else: + modules_by_package = cdf_modules_by_packages + if verbose: + print(" [bold green]INFO:[/] Found defined packages:") + for name, content in modules_by_package.items(): + print(f" {name}: {content}") + return modules_by_package + + def read_yaml_files( yaml_dirs: list[str] | str, name: str | None = None, @@ -151,6 +263,37 @@ return data +@overload +def read_yaml_file(filepath: Path, expected_output: Literal["dict"] = "dict") -> dict[str, Any]: + ... + + +@overload +def read_yaml_file(filepath: Path, expected_output: Literal["list"]) -> list[dict[str, Any]]: + ...
+ + +def read_yaml_file( + filepath: Path, expected_output: Literal["list", "dict"] = "dict" +) -> dict[str, Any] | list[dict[str, Any]]: + """Read a YAML file and return its content as a dict (or a list, when expected_output is "list") + + filepath: path to the YAML file + """ + try: + config_data = yaml.safe_load(filepath.read_text()) + except yaml.YAMLError as e: + print(f" [bold red]ERROR:[/] reading {filepath}: {e}") + return {} + if expected_output == "list" and isinstance(config_data, dict): + print(f" [bold red]ERROR:[/] {filepath} is not a list") + exit(1) + elif expected_output == "dict" and isinstance(config_data, list): + print(f" [bold red]ERROR:[/] {filepath} is not a dict") + exit(1) + return config_data + + def check_yaml_semantics(parsed: Any, filepath_src: Path, filepath_build: Path, verbose: bool = False) -> bool: """Check the yaml file for semantic errors @@ -280,163 +423,73 @@ def check_yaml_semantics(parsed: Any, filepath_src: Path, filepath_build: Path, def process_config_files( - dirs: list[str], - yaml_data: str, - build_dir: str = "./build", + source_module_dir: Path, + selected_modules: list[str], + build_dir: Path, + config: dict[str, Any], build_env: str = "dev", - clean: bool = False, verbose: bool = False, -): - path = Path(build_dir) - if path.exists(): - if any(path.iterdir()): - if clean: - shutil.rmtree(path) - path.mkdir() - print(f" [bold green]INFO:[/] Cleaned existing build directory {build_dir}.") - else: - print(" [bold yellow]WARNING:[/] Build directory is not empty. Use --clean to remove existing files.") - else: - path.mkdir() +) -> None: + configs = split_config(config) + number_by_resource_type = defaultdict(int) - local_yaml_path = "" - yaml_local = {} - indices = {} - for directory in dirs: + for module_dir, filepaths in iterate_modules(source_module_dir): + if module_dir.name not in selected_modules: + continue if verbose: - print(f" [bold green]INFO:[/] Processing module {directory}") - for dirpath, _, filenames in os.walk(directory): - # Sort to support 1., 2. etc prefixes - filenames.sort() - # When we have traversed out of the module, reset the local yaml config - if local_yaml_path not in dirpath: - local_yaml_path == "" - yaml_local = {} - for file_name in filenames: - # Find the root folder and drop processing all files in this dolder - if file_name == "config.yaml" or file_name == "default.config.yaml": - # Pick up this local yaml files - local_yaml_path = dirpath - yaml_local = read_yaml_files([dirpath]) - filenames = [] - for file_name in filenames: - if file_name in EXCL_FILES: - continue - if verbose: - print(f" [bold green]INFO:[/] Processing {file_name}") - split_path = Path(dirpath).parts - cdf_path = split_path[len(split_path) - 1] - new_path = Path(f"{build_dir}/{cdf_path}") - new_path.mkdir(exist_ok=True, parents=True) - if (Path(dirpath) / file_name).suffix.lower()[1:] not in PROC_TMPL_VARS_SUFFIX: - shutil.copyfile(Path(dirpath) / file_name, new_path / file_name) - continue - with open(dirpath + "/" + file_name) as f: - content = f.read() - # Replace the local yaml variables - for k, v in yaml_local.items(): - if "." in k: - # If the key has a dot, it is a build_env specific variable. - # Skip if it's the wrong environment. - if k.split(".")[0] != build_env: - continue - k = k.split(".", 2)[1] - # assuming template variables are in the format {{key}} - # TODO: issue warning if key is not found, this can indicate a config file error - content = content.replace(f"{{{{{k}}}}}", str(v)) - # Replace the root yaml variables - for k, v in yaml_data.items(): - if "."
in k: - # If the key has a dot, it is a build_env specific variable. - # Skip if it's the wrong environment. - if k.split(".")[0] != build_env: - continue - k = k.split(".", 2)[1] - # assuming template variables are in the format {{key}} - content = content.replace(f"{{{{{k}}}}}", str(v)) - orig_file = Path(dirpath) / file_name - # For .sql and other dependent files, we do not prefix as we expect them - # to be named with the external_id of the entitiy they are associated with. - if file_name.split(".")[-1] not in EXCL_INDEX_SUFFIX: - if not indices.get(cdf_path): - indices[cdf_path] = 1 - else: - indices[cdf_path] += 1 - # Get rid of the local index - if re.match("^[0-9]+\\.", file_name): - file_name = file_name.split(".", 1)[1] - file_name = f"{indices[cdf_path]}.{file_name}" - - filepath = new_path / file_name - for unmatched in re.findall(pattern=r"\{\{.*?\}\}", string=content): - print( - f" [bold yellow]WARNING:[/] Unresolved template variable {unmatched} in {new_path}/{file_name}" - ) + print(f" [bold green]INFO:[/] Processing module {module_dir.name}") + local_config = create_local_config(configs, module_dir) + # Sort to support 1., 2. etc prefixes + filepaths.sort() + for filepath in filepaths: + if verbose: + print(f" [bold green]INFO:[/] Processing {filepath.name}") - filepath.write_text(content) + if filepath.suffix.lower() not in PROC_TMPL_VARS_SUFFIX: + # Copy the file as-is, with no variable replacement + destination = build_dir / filepath.parent.name / filepath.name + destination.parent.mkdir(parents=True, exist_ok=True) + shutil.copyfile(filepath, destination) + continue - if filepath.suffix in {".yaml", ".yml"}: - try: - parsed = yaml.safe_load(content) - except yaml.YAMLError as e: - print( - f" [bold red]ERROR:[/] YAML validation error for {file_name} after substituting config variables: \n{e}" - ) - exit(1) - - if isinstance(parsed, dict): - parsed = [parsed] - for item in parsed: - if not check_yaml_semantics( - parsed=item, - filepath_src=orig_file, - filepath_build=filepath, - ): - exit(1) - loader = LOADER_BY_FOLDER_NAME.get(filepath.parent.name) - if len(loader) == 1: - loader = loader[0] - else: - loader = next( - (loader for loader in loader if re.match(loader.filename_pattern, filepath.stem)), None - ) - if loader: - load_warnings = validate_case_raw( - parsed, loader.resource_cls, filepath, identifier_key=loader.identifier_key - ) - if load_warnings: - print(f" [bold yellow]WARNING:[/]{generate_warnings_report(load_warnings, indent=1)}") + content = filepath.read_text() + content = replace_variables(content, local_config, build_env) + filename = create_file_name(filepath, number_by_resource_type) + + destination = build_dir / filepath.parent.name / filename + destination.parent.mkdir(parents=True, exist_ok=True) + destination.write_text(content) + + validate(content, destination, filepath) def build_config( - build_dir: str = "./build", - source_dir: str = "./", + build_dir: Path, + source_dir: Path, + config_file: Path, + environment_file: Path, build_env: str = "dev", clean: bool = False, - verbose=False, + verbose: bool = False, ): if build_env is None: raise ValueError("build_env must be specified") - if not source_dir.endswith("/"): - source_dir = source_dir + "/" - modules = read_environ_config( - root_dir=source_dir, - tmpl_dirs=TMPL_DIRS, - build_env=build_env, - verbose=verbose, - ) - process_config_files( - dirs=modules, - yaml_data=read_yaml_files(yaml_dirs=source_dir), - build_dir=build_dir, - build_env=build_env, - clean=clean, - verbose=verbose, - )
- # Copy the root deployment yaml files - shutil.copyfile(Path(source_dir) / "local.yaml", Path(build_dir) / "local.yaml") - shutil.copyfile(Path(source_dir) / "packages.yaml", Path(build_dir) / "packages.yaml") - shutil.copyfile(Path(source_dir) / "default.packages.yaml", Path(build_dir) / "default.packages.yaml") + if build_dir.exists(): + if any(build_dir.iterdir()): + if clean: + shutil.rmtree(build_dir) + build_dir.mkdir() + print(f" [bold green]INFO:[/] Cleaned existing build directory {build_dir!s}.") + else: + print(" [bold yellow]WARNING:[/] Build directory is not empty. Use --clean to remove existing files.") + else: + build_dir.mkdir() + source_module_dir = source_dir / COGNITE_MODULES + + selected_modules = get_selected_modules(source_module_dir, environment_file, build_env, verbose) + + config = read_yaml_file(config_file) + process_config_files(source_module_dir, selected_modules, build_dir, config, build_env, verbose) def generate_warnings_report(load_warnings: list[LoadWarning], indent: int = 0) -> str: @@ -450,3 +503,321 @@ def generate_warnings_report(load_warnings: list[LoadWarning], indent: int = 0) report.append(f"{' '*(indent+1)}{warning!s}") return "\n".join(report) + + +def generate_config( + directory: Path | Sequence[Path], include_modules: set[str] | None = None, existing_config: str | None = None +) -> tuple[str, ConfigEntries]: + """Generate config file content from the default.config.yaml files in the given directories. + + You can specify a set of modules to include in the config. If you do not specify any modules, all modules will be included. + + Args: + directory: A root directory, or a list of directories, to search for default.config.yaml files. + include_modules: A set of modules to include in the config. If None, all modules will be included. + existing_config: An existing config, given as a YAML string, whose values are kept and compared against the defaults. + + Returns: + A tuple of the generated config as a YAML string and the ConfigEntries describing the changes relative to the existing config.
+ """ + yaml_loader = YAML() + config = (existing_config and yaml_loader.load(existing_config)) or CommentedMap() + entries = ConfigEntries((existing_config and yaml.safe_load(existing_config)) or None) + if isinstance(directory, Path): + directories = [directory] + else: + directories = list(directory) + + for dir_ in directories: + if not dir_.exists(): + raise ValueError(f"Directory {dir_} does not exist") + defaults = sorted(dir_.glob(f"**/{DEFAULT_CONFIG_FILE}"), key=lambda f: f.relative_to(dir_)) + + for default_config in defaults: + if include_modules is not None and default_config.parent.name not in include_modules: + continue + file_data = yaml_loader.load(default_config.read_text()) + parts = default_config.relative_to(dir_).parent.parts + if len(parts) == 0: + # This is a root config file + for key, value in file_data.items(): + config[key] = value + entries.append( + ConfigEntry( + key=key, + module="", + path="", + last_value=None, + current_value=value, + ) + ) + continue + local_config = config + for key in parts: + if key not in local_config: + local_config[key] = CommentedMap() + local_config = local_config[key] + + for key, value in file_data.items(): + local_config[key] = value + entries.append( + ConfigEntry( + key=key, + module=default_config.parent.name, + path=".".join(parts[:-1]), + last_value=None, + current_value=value, + ) + ) + for removed in entries.removed: + parts = [part for part in (*removed.path.split("."), removed.module) if part] + local_config = config + last_config = None + for key in parts: + last_config = local_config + local_config = local_config[key] + del local_config[removed.key] + if not local_config and last_config is not None: + del last_config[parts[-1]] + + output = io.StringIO() + yaml_loader.dump(config, output) + output_yaml = output.getvalue() + # Indent comments + output_lines = [] + leading_spaces = 0 + for line in output_yaml.splitlines(): + if line.lstrip().startswith("#"): + line = f"{' '*leading_spaces}{line}" + else: + leading_spaces = len(line) - len(line.lstrip()) + output_lines.append(line) + return "\n".join(output_lines) + "\n", entries + + +class ConfigEntries(UserList): + def __init__(self, entries: list[ConfigEntry] | dict | None = None): + if isinstance(entries, dict): + entries = self._initialize(entries) + super().__init__(entries or []) + self._lookup = {} + for entry in self: + self._lookup.setdefault(entry.module, {})[entry.key] = entry + + @staticmethod + def _initialize(entries: dict, path: str = "") -> list[ConfigEntry]: + results = [] + if "."
in path: + path_to, module = path.rsplit(".", maxsplit=1) + else: + module = path + path_to = "" + for key, value in entries.items(): + if isinstance(value, dict): + results.extend(ConfigEntries._initialize(value, f"{path}.{key}" if path else key)) + else: + results.append( + ConfigEntry( + key=key, + module=module, + path=path_to, + last_value=value, + current_value=None, + ) + ) + return results + + def append(self, item: ConfigEntry) -> None: + if item.module not in self._lookup: + self._lookup[item.module] = {} + if item.key not in self._lookup[item.module]: + self._lookup[item.module][item.key] = item + super().append(item) + else: + self._lookup[item.module][item.key].current_value = item.current_value + + @property + def changed(self) -> list[ConfigEntry]: + return [entry for entry in self if entry.changed] + + @property + def removed(self) -> list[ConfigEntry]: + return [entry for entry in self if entry.removed] + + @property + def added(self) -> list[ConfigEntry]: + return [entry for entry in self if entry.added] + + @property + def unchanged(self) -> list[ConfigEntry]: + return [entry for entry in self if entry.unchanged] + + def __str__(self) -> str: + total_variables = len(self) + lines = [] + if removed := self.removed: + lines.append(f"Removed {len(removed)} variables from config.yaml: {[str(r) for r in removed]}") + if added := self.added: + lines.append(f"Added {len(added)} variables to config.yaml: {[str(a) for a in added]}") + if changed := self.changed: + lines.append(f"Changed {len(changed)} variables in config.yaml: {[str(c) for c in changed]}") + if total_variables == len(self.unchanged): + lines.append("No variables in config.yaml were changed.") + return "\n".join(lines) + + +@dataclass +class ConfigEntry: + key: str + module: str + path: str + last_value: Any | None + current_value: Any | None + + @property + def changed(self) -> bool: + return self.last_value is not None and self.current_value is not None and self.last_value != self.current_value + + @property + def removed(self) -> bool: + return self.last_value is not None and self.current_value is None + + @property + def added(self) -> bool: + return self.last_value is None and self.current_value is not None + + @property + def unchanged(self) -> bool: + return self.last_value is not None and self.current_value is not None and self.last_value == self.current_value + + def __str__(self): + prefix = self._prefix() + if self.removed: + return f"{prefix}{self.key} was removed" + elif self.added: + return f"{prefix}{self.key} was added" + elif self.changed: + return f"{prefix}{self.key} changed from {self.last_value!r} to {self.current_value!r}" + else: + return f"{prefix}{self.key} is unchanged" + + def __repr__(self): + prefix = self._prefix() + return f"{prefix}{self.key}={self.current_value!r}" + + def _prefix(self): + parts = [] + if self.path: + parts.append(self.path) + if self.module: + parts.append(self.module) + prefix = "" + if parts: + prefix = ".".join(parts) + "."
+ return prefix + + +def iterate_modules(root_dir: Path) -> Iterator[tuple[Path, list[Path]]]: + for module_dir in root_dir.rglob("*"): + if not module_dir.is_dir(): + continue + module_directories = [path for path in module_dir.iterdir() if path.is_dir()] + is_all_resource_directories = all(dir_.name in LOADER_BY_FOLDER_NAME for dir_ in module_directories) + if module_directories and is_all_resource_directories: + yield module_dir, [path for path in module_dir.rglob("*") if path.is_file() and path.name not in EXCL_FILES] + + +def create_local_config(config: dict[str, Any], module_dir: Path) -> Mapping[str, str]: + maps = [] + parts = module_dir.parts + if parts[0] != COGNITE_MODULES and COGNITE_MODULES in parts: + parts = parts[parts.index(COGNITE_MODULES) :] + if parts[0] != CUSTOM_MODULES and CUSTOM_MODULES in parts: + parts = parts[parts.index(CUSTOM_MODULES) :] + for no in range(len(parts), -1, -1): + if c := config.get(".".join(parts[:no])): + maps.append(c) + return ChainMap(*maps) + + +def split_config(config: dict[str, Any]) -> dict[str, dict[str, str]]: + configs = {} + _split_config(config, configs, prefix="") + return configs + + +def _split_config(config: dict[str, Any], configs: dict[str, dict[str, str]], prefix: str = "") -> None: + for key, value in config.items(): + if isinstance(value, dict): + if prefix and not prefix.endswith("."): + prefix = f"{prefix}." + _split_config(value, configs, prefix=f"{prefix}{key}") + else: + configs.setdefault(prefix.removesuffix("."), {})[key] = value + + +def create_file_name(filepath: Path, number_by_resource_type: dict[str, int]) -> str: + filename = filepath.name + if filepath.suffix in EXCL_INDEX_SUFFIX: + return filename + # Get rid of the local index + filename = re.sub("^[0-9]+\\.", "", filename) + number_by_resource_type[filepath.parent.name] += 1 + filename = f"{number_by_resource_type[filepath.parent.name]}.{filename}" + return filename + + +def replace_variables(content: str, local_config: Mapping[str, str], build_env: str) -> str: + for name, variable in local_config.items(): + if "." in name: + # If the key has a dot, it is a build_env specific variable. + # Skip if it's the wrong environment.
+ env, name = name.split(".", 1) + if env != build_env: + continue + content = content.replace(f"{{{{{name}}}}}", str(variable)) + return content + + +def validate(content: str, destination: Path, source_path: Path) -> None: + for unmatched in re.findall(pattern=r"\{\{.*?\}\}", string=content): + print(f" [bold yellow]WARNING:[/] Unresolved template variable {unmatched} in {destination!s}") + + if destination.suffix in {".yaml", ".yml"}: + try: + parsed = yaml.safe_load(content) + except yaml.YAMLError as e: + print( + f" [bold red]ERROR:[/] YAML validation error for {destination.name} after substituting config variables: \n{e}" + ) + exit(1) + + if isinstance(parsed, dict): + parsed = [parsed] + for item in parsed: + if not check_yaml_semantics( + parsed=item, + filepath_src=source_path, + filepath_build=destination, + ): + exit(1) + loaders = LOADER_BY_FOLDER_NAME.get(destination.parent.name) + if loaders is None: + return + if len(loaders) == 1: + loader = loaders[0] + else: + loader = next((loader for loader in loaders if re.match(loader.filename_pattern, destination.stem)), None) + if loader: + load_warnings = validate_case_raw( + parsed, loader.resource_cls, destination, identifier_key=loader.identifier_key + ) + if load_warnings: + print(f" [bold yellow]WARNING:[/]{generate_warnings_report(load_warnings, indent=1)}") + + +if __name__ == "__main__": + target_dir = Path(__file__).resolve().parent.parent + config_str, differences = generate_config(target_dir, existing_config=(target_dir / CONFIG_FILE).read_text()) + (target_dir / CONFIG_FILE).write_text(config_str) + print(str(differences)) diff --git a/cognite_toolkit/cognite_modules/README.md b/cognite_toolkit/cognite_modules/README.md new file mode 100644 index 000000000..e1799be45 --- /dev/null +++ b/cognite_toolkit/cognite_modules/README.md @@ -0,0 +1,26 @@ +# Main modules folder + +**YOU SHOULD NOT EDIT ANY OF THE FILES IN THIS DIRECTORY OR ITS SUB-DIRECTORIES** + +Modules in this folder come bundled with the `cdf-tk` tool. They are managed +from a [public repository](https://github.com/cognitedata/cdf-project-templates). + +The modules prefixed by `cdf_` are managed and supported by Cognite. You should put your own modules in +the custom_modules/ directory. + +In the root of this directory, you will find the `default.config.yaml` file that defines globally available +configuration variables. These can also be used in your own modules. For each of the modules, you will +find a `default.config.yaml` file that defines the default module-specific configuration variables. + +As part of a `cdf-tk init`, these default variables will be copied to the `config.yaml` file in the +root of your project directory. You can then override these default values in that `config.yaml` file. + +The modules are grouped into sub-directories: + +* **common**: these modules are CDF project-wide and are not specific to any particular solution. +* **examples**: these modules are meant to be copied to `custom_modules`, renamed, and used as a starting point + for your own modules. +* **solution directories**: e.g. `core` and `infield`. These modules are specific to a particular solution. Typically, + a solution like infield consists of multiple modules. + +See the [module and package documentation](https://developer.cognite.com/sdks/toolkit/references/module_reference) for an introduction.
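To make the variable substitution that this README describes concrete, here is a minimal standalone sketch of what the `replace_variables` function added to `cognite_toolkit/cdf_tk/templates.py` above does during `cdf-tk build`; the variable names and values below are made up for illustration:

```python
# A module file in the templates, using {{variable}} placeholders:
template = "externalId: ds_files_{{default_location}}\nsource: {{source_name}}\n"

# Variables as they would appear in the merged config for this module.
# A key prefixed with an environment name only applies to that build environment.
local_config = {
    "default_location": "oid",
    "dev.source_name": "workmate",  # used only when build_env == "dev"
    "prod.source_name": "sap",      # skipped for dev builds
}

build_env = "dev"
content = template
for name, value in local_config.items():
    if "." in name:
        env, name = name.split(".", 1)
        if env != build_env:
            continue
    content = content.replace(f"{{{{{name}}}}}", str(value))

print(content)
# externalId: ds_files_oid
# source: workmate
```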
diff --git a/cognite_toolkit/common/cdf_auth_readwrite_all/README.md b/cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/README.md similarity index 100% rename from cognite_toolkit/common/cdf_auth_readwrite_all/README.md rename to cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/README.md diff --git a/cognite_toolkit/common/cdf_auth_readwrite_all/auth/readonly.all.group.yaml b/cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/auth/readonly.all.group.yaml similarity index 100% rename from cognite_toolkit/common/cdf_auth_readwrite_all/auth/readonly.all.group.yaml rename to cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/auth/readonly.all.group.yaml diff --git a/cognite_toolkit/common/cdf_auth_readwrite_all/auth/readwrite.all.group.yaml b/cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/auth/readwrite.all.group.yaml similarity index 100% rename from cognite_toolkit/common/cdf_auth_readwrite_all/auth/readwrite.all.group.yaml rename to cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/auth/readwrite.all.group.yaml diff --git a/cognite_toolkit/common/cdf_auth_readwrite_all/default.config.yaml b/cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/default.config.yaml similarity index 100% rename from cognite_toolkit/common/cdf_auth_readwrite_all/default.config.yaml rename to cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/default.config.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/README.md b/cognite_toolkit/cognite_modules/core/cdf_apm_base/README.md similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/README.md rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/README.md diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/1.Activity.container.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/1.Activity.container.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/1.Activity.container.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/1.Activity.container.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/1.Activity.view.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/1.Activity.view.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/1.Activity.view.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/1.Activity.view.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/2.Operation.container.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/2.Operation.container.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/2.Operation.container.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/2.Operation.container.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/2.Operation.view.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/2.Operation.view.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/2.Operation.view.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/2.Operation.view.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/3.Notification.container.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/3.Notification.container.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/3.Notification.container.yaml rename to 
cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/3.Notification.container.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/3.Notification.view.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/3.Notification.view.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/3.Notification.view.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/3.Notification.view.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/4.ApmConfig.container.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/4.ApmConfig.container.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/4.ApmConfig.container.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/4.ApmConfig.container.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/4.ApmConfig.view.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/4.ApmConfig.view.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/4.ApmConfig.view.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/4.ApmConfig.view.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/APM_Config.space.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/APM_Config.space.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/APM_Config.space.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/APM_Config.space.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/apm.datamodel.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/apm.datamodel.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/apm.datamodel.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/apm.datamodel.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/apm_config.datamodel.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/apm_config.datamodel.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/apm_config.datamodel.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/apm_config.datamodel.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/data_models/apm_data_model.space.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/apm_data_model.space.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/data_models/apm_data_model.space.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/apm_data_model.space.yaml diff --git a/cognite_toolkit/modules/cdf_apm_base/default.config.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/default.config.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_apm_base/default.config.yaml rename to cognite_toolkit/cognite_modules/core/cdf_apm_base/default.config.yaml diff --git a/cognite_toolkit/default.config.yaml b/cognite_toolkit/cognite_modules/default.config.yaml similarity index 100% rename from cognite_toolkit/default.config.yaml rename to cognite_toolkit/cognite_modules/default.config.yaml diff --git a/cognite_toolkit/default.packages.yaml b/cognite_toolkit/cognite_modules/default.packages.yaml similarity index 100% rename from cognite_toolkit/default.packages.yaml rename to cognite_toolkit/cognite_modules/default.packages.yaml diff --git 
a/cognite_toolkit/examples/README.md b/cognite_toolkit/cognite_modules/examples/README.md similarity index 100% rename from cognite_toolkit/examples/README.md rename to cognite_toolkit/cognite_modules/examples/README.md diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/README.md b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/README.md similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/README.md rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/README.md diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/1.Asset.container.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/1.Asset.container.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/1.Asset.container.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/1.Asset.container.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/2.WorkOrder.container.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/2.WorkOrder.container.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/2.WorkOrder.container.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/2.WorkOrder.container.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/3.WorkItem.container.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/3.WorkItem.container.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/3.WorkItem.container.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/3.WorkItem.container.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/4.Asset.view.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/4.Asset.view.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/4.Asset.view.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/4.Asset.view.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/5.WorkItem.view.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/5.WorkItem.view.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/5.WorkItem.view.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/5.WorkItem.view.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/6.WorkOrder.view.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/6.WorkOrder.view.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/6.WorkOrder.view.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/6.WorkOrder.view.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/7.apm_simple.datamodel.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/7.apm_simple.datamodel.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/7.apm_simple.datamodel.yaml rename to 
cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/7.apm_simple.datamodel.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/apm_simple.space.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/apm_simple.space.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/data_models/apm_simple.space.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/apm_simple.space.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/default.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/default.config.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.sql b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.sql similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.sql rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.sql diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.sql b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.sql similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.sql rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.sql diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.sql b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.sql similarity index 100% rename from 
cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.sql rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.sql diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.sql b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.sql similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.sql rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.sql diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.sql b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.sql similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.sql rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.sql diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.sql 
b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.sql similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.sql rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.sql diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.sql b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.sql similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.sql rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.sql diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.sql b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.sql similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.sql rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.sql diff --git a/cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml diff --git 
a/cognite_toolkit/examples/cdf_oid_example_data/LICENSE.md b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/LICENSE.md similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/LICENSE.md rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/LICENSE.md diff --git a/cognite_toolkit/examples/cdf_oid_example_data/Open Industrial Data - Terms of Use.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/Open Industrial Data - Terms of Use.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/Open Industrial Data - Terms of Use.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/Open Industrial Data - Terms of Use.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/README.md b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/README.md similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/README.md rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/README.md diff --git a/cognite_toolkit/examples/cdf_oid_example_data/data_sets/data_sets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/data_sets/data_sets.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/data_sets/data_sets.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/data_sets/data_sets.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/default.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/default.config.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/default.config.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/default.config.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-25578-P-4110006-001.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-25578-P-4110006-001.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-25578-P-4110006-001.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-25578-P-4110006-001.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-25578-P-4110010-001.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-25578-P-4110010-001.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-25578-P-4110010-001.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-25578-P-4110010-001.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-25578-P-4110119-001.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-25578-P-4110119-001.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-25578-P-4110119-001.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-25578-P-4110119-001.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0003-001.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0003-001.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0003-001.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0003-001.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0004-001.pdf 
b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0004-001.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0004-001.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0004-001.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0151-001.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0151-001.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0151-001.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0151-001.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0152-001.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0152-001.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0152-001.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0152-001.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0153-001.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0153-001.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0153-001.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0153-001.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0156-001.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0156-001.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0156-001.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0156-001.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0156-002.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0156-002.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0156-002.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0156-002.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0160-001.pdf b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0160-001.pdf similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/PH-ME-P-0160-001.pdf rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/PH-ME-P-0160-001.pdf diff --git a/cognite_toolkit/examples/cdf_oid_example_data/files/files.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/files.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/files/files.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/files/files.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/asset2children.csv b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/asset2children.csv similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/asset2children.csv rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/asset2children.csv diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/asset2children.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/asset2children.yaml similarity index 100% rename from 
cognite_toolkit/examples/cdf_oid_example_data/raw/asset2children.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/asset2children.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/assets.csv b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/assets.csv similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/assets.csv rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/assets.csv diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/assets.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/assets.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/assets.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/files_metadata.csv b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/files_metadata.csv similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/files_metadata.csv rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/files_metadata.csv diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/files_metadata.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/files_metadata.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/files_metadata.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/files_metadata.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/timeseries2assets.csv b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/timeseries2assets.csv similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/timeseries2assets.csv rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/timeseries2assets.csv diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/timeseries2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/timeseries2assets.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/timeseries2assets.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/timeseries2assets.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/workitem2assets.csv b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workitem2assets.csv similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/workitem2assets.csv rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workitem2assets.csv diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/workitem2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workitem2assets.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/workitem2assets.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workitem2assets.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/workitems.csv b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workitems.csv similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/workitems.csv rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workitems.csv diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/workitems.yaml 
b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workitems.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/workitems.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workitems.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/workorder2assets.csv b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorder2assets.csv similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/workorder2assets.csv rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorder2assets.csv diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/workorder2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorder2assets.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/workorder2assets.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorder2assets.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/workorder2items.csv b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorder2items.csv similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/workorder2items.csv rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorder2items.csv diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/workorder2items.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorder2items.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/workorder2items.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorder2items.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/workorders.csv b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorders.csv similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/workorders.csv rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorders.csv diff --git a/cognite_toolkit/examples/cdf_oid_example_data/raw/workorders.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorders.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/raw/workorders.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/raw/workorders.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/timeseries/timeseries.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/timeseries/timeseries.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/timeseries/timeseries.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/timeseries/timeseries.yaml diff --git a/cognite_toolkit/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.sql b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.sql similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.sql rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.sql diff --git a/cognite_toolkit/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml 
b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml diff --git a/cognite_toolkit/examples/example_pump_asset_hierarchy/README.md b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/README.md similarity index 100% rename from cognite_toolkit/examples/example_pump_asset_hierarchy/README.md rename to cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/README.md diff --git a/cognite_toolkit/examples/example_pump_asset_hierarchy/data_sets/data_sets.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/data_sets/data_sets.yaml similarity index 100% rename from cognite_toolkit/examples/example_pump_asset_hierarchy/data_sets/data_sets.yaml rename to cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/data_sets/data_sets.yaml diff --git a/cognite_toolkit/examples/example_pump_asset_hierarchy/default.config.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/default.config.yaml similarity index 100% rename from cognite_toolkit/examples/example_pump_asset_hierarchy/default.config.yaml rename to cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/default.config.yaml diff --git a/cognite_toolkit/examples/example_pump_asset_hierarchy/raw/collections_pump.csv b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/raw/collections_pump.csv similarity index 100% rename from cognite_toolkit/examples/example_pump_asset_hierarchy/raw/collections_pump.csv rename to cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/raw/collections_pump.csv diff --git a/cognite_toolkit/examples/example_pump_asset_hierarchy/raw/collections_pump.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/raw/collections_pump.yaml similarity index 100% rename from cognite_toolkit/examples/example_pump_asset_hierarchy/raw/collections_pump.yaml rename to cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/raw/collections_pump.yaml diff --git a/cognite_toolkit/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.sql b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.sql similarity index 100% rename from cognite_toolkit/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.sql rename to cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.sql diff --git a/cognite_toolkit/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml similarity index 100% rename from cognite_toolkit/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml rename to cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml diff --git a/cognite_toolkit/experimental/README.md 
b/cognite_toolkit/cognite_modules/experimental/README.md similarity index 100% rename from cognite_toolkit/experimental/README.md rename to cognite_toolkit/cognite_modules/experimental/README.md diff --git a/cognite_toolkit/experimental/cdf_asset_source_model/README.md b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/README.md similarity index 100% rename from cognite_toolkit/experimental/cdf_asset_source_model/README.md rename to cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/README.md diff --git a/cognite_toolkit/experimental/cdf_asset_source_model/data_models/1.InstanceSpace.space.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_models/1.InstanceSpace.space.yaml similarity index 100% rename from cognite_toolkit/experimental/cdf_asset_source_model/data_models/1.InstanceSpace.space.yaml rename to cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_models/1.InstanceSpace.space.yaml diff --git a/cognite_toolkit/experimental/cdf_asset_source_model/data_models/2.ModelSpace.space.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_models/2.ModelSpace.space.yaml similarity index 100% rename from cognite_toolkit/experimental/cdf_asset_source_model/data_models/2.ModelSpace.space.yaml rename to cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_models/2.ModelSpace.space.yaml diff --git a/cognite_toolkit/experimental/cdf_asset_source_model/data_models/ExtendedAsset.container.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_models/ExtendedAsset.container.yaml similarity index 100% rename from cognite_toolkit/experimental/cdf_asset_source_model/data_models/ExtendedAsset.container.yaml rename to cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_models/ExtendedAsset.container.yaml diff --git a/cognite_toolkit/experimental/cdf_asset_source_model/data_models/ExtendedAsset.view.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_models/ExtendedAsset.view.yaml similarity index 100% rename from cognite_toolkit/experimental/cdf_asset_source_model/data_models/ExtendedAsset.view.yaml rename to cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_models/ExtendedAsset.view.yaml diff --git a/cognite_toolkit/experimental/cdf_asset_source_model/data_models/ExtendedSourceData.datamodel.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_models/ExtendedSourceData.datamodel.yaml similarity index 100% rename from cognite_toolkit/experimental/cdf_asset_source_model/data_models/ExtendedSourceData.datamodel.yaml rename to cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_models/ExtendedSourceData.datamodel.yaml diff --git a/cognite_toolkit/experimental/cdf_asset_source_model/default.config.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/default.config.yaml similarity index 100% rename from cognite_toolkit/experimental/cdf_asset_source_model/default.config.yaml rename to cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/default.config.yaml diff --git a/cognite_toolkit/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.sql b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.sql similarity index 100% rename from 
cognite_toolkit/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.sql rename to cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.sql diff --git a/cognite_toolkit/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml similarity index 100% rename from cognite_toolkit/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml rename to cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml diff --git a/cognite_toolkit/experimental/example_pump_data_model/README.md b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/README.md similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/README.md rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/README.md diff --git a/cognite_toolkit/experimental/example_pump_data_model/data_models/1.spaces.space.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/1.spaces.space.yaml similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/data_models/1.spaces.space.yaml rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/1.spaces.space.yaml diff --git a/cognite_toolkit/experimental/example_pump_data_model/data_models/2.Pump.container.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/2.Pump.container.yaml similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/data_models/2.Pump.container.yaml rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/2.Pump.container.yaml diff --git a/cognite_toolkit/experimental/example_pump_data_model/data_models/3.LiftStation.view.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/3.LiftStation.view.yaml similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/data_models/3.LiftStation.view.yaml rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/3.LiftStation.view.yaml diff --git a/cognite_toolkit/experimental/example_pump_data_model/data_models/4.Pump.view.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/4.Pump.view.yaml similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/data_models/4.Pump.view.yaml rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/4.Pump.view.yaml diff --git a/cognite_toolkit/experimental/example_pump_data_model/data_models/5.PumpLiftStations.datamodel.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/5.PumpLiftStations.datamodel.yaml similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/data_models/5.PumpLiftStations.datamodel.yaml rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/5.PumpLiftStations.datamodel.yaml diff --git a/cognite_toolkit/experimental/example_pump_data_model/default.config.yaml 
b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/default.config.yaml similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/default.config.yaml rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/default.config.yaml diff --git a/cognite_toolkit/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.sql b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.sql similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.sql rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.sql diff --git a/cognite_toolkit/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml diff --git a/cognite_toolkit/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.sql b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.sql similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.sql rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.sql diff --git a/cognite_toolkit/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml similarity index 100% rename from cognite_toolkit/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml rename to cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml diff --git a/cognite_toolkit/modules/cdf_infield_common/README.md b/cognite_toolkit/cognite_modules/infield/cdf_infield_common/README.md similarity index 100% rename from cognite_toolkit/modules/cdf_infield_common/README.md rename to cognite_toolkit/cognite_modules/infield/cdf_infield_common/README.md diff --git a/cognite_toolkit/modules/cdf_infield_common/auth/applications-configuration.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_common/auth/applications-configuration.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_common/auth/applications-configuration.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_common/auth/applications-configuration.yaml diff --git a/cognite_toolkit/modules/cdf_infield_common/data_models/infieldAppData.space.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_common/data_models/infieldAppData.space.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_common/data_models/infieldAppData.space.yaml rename to 
cognite_toolkit/cognite_modules/infield/cdf_infield_common/data_models/infieldAppData.space.yaml diff --git a/cognite_toolkit/modules/cdf_infield_common/default.config.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_common/default.config.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_common/default.config.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_common/default.config.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/README.md b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/README.md similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/README.md rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/README.md diff --git a/cognite_toolkit/modules/cdf_infield_location/auth/infield_checklist_admin_role.group.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/auth/infield_checklist_admin_role.group.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/auth/infield_checklist_admin_role.group.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/auth/infield_checklist_admin_role.group.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/auth/infield_normal_role.group.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/auth/infield_normal_role.group.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/auth/infield_normal_role.group.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/auth/infield_normal_role.group.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/auth/infield_template_admin_role.group.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/auth/infield_template_admin_role.group.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/auth/infield_template_admin_role.group.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/auth/infield_template_admin_role.group.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/auth/infield_viewer_role.group.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/auth/infield_viewer_role.group.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/auth/infield_viewer_role.group.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/auth/infield_viewer_role.group.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/data_models/infieldLocationAppData.space.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationAppData.space.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/data_models/infieldLocationAppData.space.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationAppData.space.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/data_models/infieldLocationSourceData.space.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationSourceData.space.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/data_models/infieldLocationSourceData.space.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationSourceData.space.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/data_models/infield_apm_app_config.node.yaml 
b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infield_apm_app_config.node.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/data_models/infield_apm_app_config.node.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infield_apm_app_config.node.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/data_sets/location_app_data_set.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_app_data_set.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/data_sets/location_app_data_set.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_app_data_set.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/data_sets/location_source_data_set.yaml.tmpl b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml.tmpl similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/data_sets/location_source_data_set.yaml.tmpl rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml.tmpl diff --git a/cognite_toolkit/modules/cdf_infield_location/default.config.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/default.config.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/default.config.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/default.config.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.sql b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.sql similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.sql rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.sql diff --git a/cognite_toolkit/modules/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.sql b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.sql similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.sql rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.sql diff --git 
a/cognite_toolkit/modules/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml diff --git a/cognite_toolkit/modules/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.sql b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.sql similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.sql rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.sql diff --git a/cognite_toolkit/modules/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml similarity index 100% rename from cognite_toolkit/modules/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml diff --git a/cognite_toolkit/config.yaml b/cognite_toolkit/config.yaml index 76bd338cd..42f66af00 100644 --- a/cognite_toolkit/config.yaml +++ b/cognite_toolkit/config.yaml @@ -1,4 +1,133 @@ -# EDIT THIS FILE! -# These variables will be globally available to your modules. -# -my_variable: test +cognite_modules: + common: + cdf_auth_readwrite_all: + readwrite_source_id: + readonly_source_id: + examples: + cdf_apm_simple_data_model: + # Values here are only valid for this module. + # The raw database values here point to the RAW data loaded in the cdf_oid_example_data + # module. If you have changed the default values in that module, you need to change them here as well. + default_location: oid + source_asset: workmate + source_workorder: workmate + source_timeseries: pi + datamodel: apm_simple + space: apm_simple + datamodel_version: '1' + view_Asset_version: '1' + view_WorkOrder_version: '1' + view_WorkItem_version: '1' + # In addition, the transformations use the following variables from the global scope (your root default.config.yaml): + #cicd_clientId + #cicd_clientSecret + #cicd_tokenUri + #cdfProjectName + #cicd_scopes + #cicd_audience + cdf_oid_example_data: + # Only valid for this module, loads template variables from environment + # + # In the example below we are setting up a project based on the Open Industrial Data (OID), + # which originates from the Valhall oil rig. Note that the location/site is NOT used + # to structure the data when on-boarding. The expectation is that a single source system + # and its data pipeline may supply data for multiple locations/sites.
+ # The structuring of the data based on site/location should happen as part of processing + # the data in CDF, i.e. contextualisation. + # + # Each data resource type is assigned the source system the data originates from. + # This information will be used to construct RAW database names, and to create data sets in CDF, + # and can be used to control access. + default_location: oid + source_asset: workmate + source_workorder: workmate + source_files: fileshare + source_timeseries: pi + example_pump_asset_hierarchy: + # Only valid for this module, loads template variables from environment + raw_db: pump_assets + data_set: src:lift_pump_stations + clientId: ${IDP_CLIENT_ID} + clientSecret: ${IDP_CLIENT_SECRET} + tokenUri: ${IDP_TOKEN_URL} + # Optional: If the IdP requires providing the scopes + cdfProjectName: ${CDF_PROJECT} + scopes: + - ${IDP_SCOPES} +# Optional: If the IdP requires providing the audience + audience: ${IDP_AUDIENCE} + experimental: + cdf_asset_source_model: + # Only valid for this module, loads template variables from environment + model_space: ExtendedSourceDataModels + instance_space: cdfTemplateInstances + view_asset_version: '1' + data_model_version: '1' + root_asset_external_id: lift_pump_stations:root + example_pump_data_model: + # Only valid for this module, loads template variables from environment + model_space: pumpModelSpace + instance_space: pumpInstanceSpace + source_model_space: ExtendedSourceDataModels + source_model: ExtendedSourceData + view_Pump_version: '1' + view_LiftStation_version: '1' + data_model_version: '1' + data_model: PumpLiftStations + infield: + cdf_infield_common: + applicationsconfiguration_source_id: + cdf_infield_location: + # This default_location points to the location created by the cdf_oid_example_data module. + # When you create your own location by copying the cdf_oid_example_data module to + # set up data sets and groups, the value below needs to refer to the location you are defining. + # + default_location: oid + module_version: '1' + apm_datamodel_space: APM_SourceData + apm_app_config_external_id: default-infield-config-minimal + apm_config_instance_space: APM_Config + # RAW databases to load workorders and other workorder data from + # The values below point to the RAW database in the cdf_oid_example_data module and should be + # changed if you want to load workorders from another RAW database.
+ source_asset: workmate + source_workorder: workmate + workorder_raw_db: workorder_oid_workmate + # The table name in the raw_db database that has workorder data + workorder_table_name: workorders + +# The root asset for this location; it needs to be updated for each location + root_asset_external_id: WMT:VAL + +# The following properties are required for +# Infield and must be updated for each location + infield_default_location_checklist_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f + infield_default_location_normal_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e + infield_default_location_template_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f + infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e + +# Transformation credentials + clientId: ${IDP_CLIENT_ID} + clientSecret: ${IDP_CLIENT_SECRET} + tokenUri: ${IDP_TOKEN_URL} + # Optional: If the IdP requires providing the scopes + cdfProjectName: ${CDF_PROJECT} + scopes: ${IDP_SCOPES} + # Optional: If the IdP requires providing the audience + audience: ${IDP_AUDIENCE} + cdf_cluster: ${CDF_CLUSTER} + cicd_clientId: ${IDP_CLIENT_ID} + cicd_clientSecret: ${IDP_CLIENT_SECRET} + cicd_tokenUri: ${IDP_TOKEN_URL} + cdfProjectName: ${CDF_PROJECT} + cicd_scopes: + - ${IDP_SCOPES} +# Optional: If the IdP requires providing the audience + cicd_audience: ${IDP_AUDIENCE} + core: + cdf_apm_base: + apm_datamodel_space: APM_SourceData + apm_datamodel_version: '1' +custom_modules: + my_example_module: + example_variable: demo_dataset diff --git a/cognite_toolkit/custom_modules/README.md b/cognite_toolkit/custom_modules/README.md new file mode 100644 index 000000000..a4adeaca1 --- /dev/null +++ b/cognite_toolkit/custom_modules/README.md @@ -0,0 +1,9 @@ +# custom_modules directory + +You are free to add your own modules to this directory as long as you don't use the `cdf_` prefix. +Each module should have a `default.config.yaml` file that contains variables that are used in the module. The +sub-directories in each module correspond to the different resources in CDF. See the [my_example_module](my_example_module/README.md) +for an example of a module. Run the command `cdf-tk init --upgrade` to add the variables from the `default.config.yaml` +into the `config.yaml` file in the root of your project directory. You can then override these default values in that `config.yaml` file. + +See the [module and package documentation](../docs/overview.md) for an introduction. diff --git a/cognite_toolkit/custom_modules/my_example_module/README.md b/cognite_toolkit/custom_modules/my_example_module/README.md new file mode 100644 index 000000000..04eb5d05a --- /dev/null +++ b/cognite_toolkit/custom_modules/my_example_module/README.md @@ -0,0 +1 @@ +This is an example of a custom module. It contains two TimeSeries and a data set. diff --git a/cognite_toolkit/custom_modules/my_example_module/data_sets/data_sets.yaml b/cognite_toolkit/custom_modules/my_example_module/data_sets/data_sets.yaml new file mode 100644 index 000000000..4abcc1b2d --- /dev/null +++ b/cognite_toolkit/custom_modules/my_example_module/data_sets/data_sets.yaml @@ -0,0 +1,4 @@ +- externalId: ds_timeseries_{{example_variable}} + name: Example dataset. + description: This is an example dataset used to demonstrate how to create a custom module + in the Cognite Data Fusion Toolkit.
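The variable flow that the `custom_modules` README above describes can be illustrated end to end. Below is a minimal sketch (assuming the `{{variable}}` templating behavior shown in `data_sets.yaml`; the concrete values are the illustrative ones from this patch) of how a module variable travels from the module's `default.config.yaml` into the root `config.yaml` and, at build time, into the generated resource file:

```yaml
# my_example_module/default.config.yaml -- the module declares its variable:
example_variable: 'demo_dataset'

# Root config.yaml after running `cdf-tk init --upgrade` -- the variable is
# collected under the module's path and can be overridden here:
custom_modules:
  my_example_module:
    example_variable: demo_dataset

# data_sets/data_sets.yaml in the module source uses the template:
# - externalId: ds_timeseries_{{example_variable}}
#
# After `cdf-tk build`, the placeholder is expected to resolve to:
# - externalId: ds_timeseries_demo_dataset
```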
diff --git a/cognite_toolkit/custom_modules/my_example_module/default.config.yaml b/cognite_toolkit/custom_modules/my_example_module/default.config.yaml new file mode 100644 index 000000000..861221be8 --- /dev/null +++ b/cognite_toolkit/custom_modules/my_example_module/default.config.yaml @@ -0,0 +1 @@ +example_variable: 'demo_dataset' diff --git a/cognite_toolkit/custom_modules/my_example_module/timeseries/timeseries.yaml b/cognite_toolkit/custom_modules/my_example_module/timeseries/timeseries.yaml new file mode 100644 index 000000000..c93ede30a --- /dev/null +++ b/cognite_toolkit/custom_modules/my_example_module/timeseries/timeseries.yaml @@ -0,0 +1,18 @@ +- externalId: 'cdf_tooklit:example_timeseries' + name: 'CDF Toolkit Example Timeseries' + dataSetExternalId: ds_timeseries_{{example_variable}} + isString: false + metadata: + - foo: 'bar' + isStep: false + description: This is an example timeseries used to demonstrate how to create a custom module + in the Cognite Data Fusion Toolkit. +- externalId: 'cdf_tooklit:example_timeseries:2' + name: 'CDF Toolkit Example Timeseries 2' + dataSetExternalId: ds_timeseries_{{example_variable}} + metadata: + - bar: 'foo' + isString: false + isStep: false + description: This is an example timeseries used to demonstrate how to create a custom module + in the Cognite Data Fusion Toolkit. diff --git a/cognite_toolkit/local.yaml b/cognite_toolkit/environments.yaml similarity index 92% rename from cognite_toolkit/local.yaml rename to cognite_toolkit/environments.yaml index 58dbd3a29..b54001b96 100644 --- a/cognite_toolkit/local.yaml +++ b/cognite_toolkit/environments.yaml @@ -24,7 +24,7 @@ demo: - cdf_demo_infield - cdf_oid_example_data local: - project: -dev + project: type: dev deploy: - cdf_auth_readwrite_all @@ -33,18 +33,18 @@ local: - cdf_infield_common - cdf_infield_location dev: - project: -dev + project: type: dev deploy: - cdf_demo_infield - cdf_oid_example_data staging: - project: -staging + project: type: staging deploy: - cdf_infield prod: - project: -prod + project: type: prod deploy: - cdf_infield diff --git a/cognite_toolkit/local_modules/README.md b/cognite_toolkit/local_modules/README.md deleted file mode 100644 index 1e53f1fab..000000000 --- a/cognite_toolkit/local_modules/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# local_modules directory - -You are free to add your own modules to this directory as long as you don't use the `cdf_` prefix. -Each module should have a config.yaml file that contains variables that are used in the module. The -sub-directories in each module correspond to the different resources in CDF. - -See the [module and package documentation](../docs/overview.md) for an introduction. \ No newline at end of file diff --git a/cognite_toolkit/modules/README.md b/cognite_toolkit/modules/README.md deleted file mode 100644 index b5d637f9b..000000000 --- a/cognite_toolkit/modules/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# Main modules folder - -Modules prefixed by `cdf_` are managed by Cognite and should not -be modified. They live in this directory. You should put your own modules in the -local_modules/ directory. - -Each module should have a config.yaml file that contains variables that are used in the module. The -sub-directories in each module correspond to the different resources in CDF. - -See the [module and package documentation](../docs/overview.md) for an introduction. 
\ No newline at end of file diff --git a/cognite_toolkit/packages.yaml b/cognite_toolkit/packages.yaml deleted file mode 100644 index 52772589b..000000000 --- a/cognite_toolkit/packages.yaml +++ /dev/null @@ -1,10 +0,0 @@ -# EDIT THIS FILE! -# -# This should be a used to create packages of modules that can be deployed. -# Only one packages command is allowed, with each package below it. -# Packages cannot be recursively defined. -packages: - # NOTE! Do NOT use package names prefixed with cdf_ as these are reserved for CDF packages. - my_package: - - module_a - - module_b diff --git a/config.yaml b/config.yaml deleted file mode 100644 index 76bd338cd..000000000 --- a/config.yaml +++ /dev/null @@ -1,4 +0,0 @@ -# EDIT THIS FILE! -# These variables will be globally available to your modules. -# -my_variable: test diff --git a/poetry.lock b/poetry.lock index 5b609ecc4..572f5f846 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "certifi" @@ -935,7 +935,7 @@ name = "pycparser" version = "2.21" description = "C parser in Python" optional = false -python-versions = "*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, @@ -1361,6 +1361,63 @@ pygments = ">=2.13.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "ruamel-yaml" +version = "0.18.5" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruamel.yaml-0.18.5-py3-none-any.whl", hash = "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"}, + {file = "ruamel.yaml-0.18.5.tar.gz", hash = "sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = 
"sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + [[package]] name = "secretstorage" version = "3.3.3" @@ -1569,4 +1626,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "396847f137a1189d4b19d6867fee9bb90bf984866586848499777ade8450b28e" +content-hash = "53b6797631963ab474c52c78f19733be14b6a12eb9ac5763f5066cfcfa332050" diff --git a/pyproject.toml b/pyproject.toml index 00ebbc954..be2a5ba1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ regex = "^2023.6.3" chardet = "^5.1.0" typer = {version = "^0.9.0", extras = ["all"]} pytest-icdiff = "*" # Used for better diffs in pytest - +'ruamel.yaml' = "^0.18" # Used for perserving comments in yaml files [tool.poetry.group.dev.dependencies] mypy = "^1.7.1" diff --git a/tests/test_approval_modules.py b/tests/test_approval_modules.py index b1cea9d4f..06d364737 100644 --- a/tests/test_approval_modules.py +++ b/tests/test_approval_modules.py @@ -10,7 +10,7 @@ import os from collections.abc import Iterator from pathlib import Path -from typing import Any +from typing import Any, Literal from unittest.mock import MagicMock import pytest @@ -18,8 +18,8 @@ from cognite.client import CogniteClient from pytest import MonkeyPatch -from cognite_toolkit.cdf import Common, build, clean, deploy -from cognite_toolkit.cdf_tk.templates import TMPL_DIRS, read_yaml_files +from cognite_toolkit.cdf import Common, build, clean, deploy, main_init +from cognite_toolkit.cdf_tk.templates import COGNITE_MODULES, iterate_modules, read_yaml_file, read_yaml_files from cognite_toolkit.cdf_tk.utils import CDFToolConfig REPO_ROOT = Path(__file__).parent.parent @@ -50,10 +50,8 @@ def chdir(new_dir: Path) -> Iterator[None]: def find_all_modules() -> Iterator[Path]: - for tmpl_dir in TMPL_DIRS: - for module in (REPO_ROOT / f"./cognite_toolkit/{tmpl_dir}").iterdir(): - if module.is_dir(): - yield pytest.param(module, id=f"{module.parent.name}/{module.name}") + for module, _ in iterate_modules(REPO_ROOT / "cognite_toolkit" / COGNITE_MODULES): + yield pytest.param(module, id=f"{module.parent.name}/{module.name}") @pytest.fixture @@ -61,6 +59,13 @@ def local_tmp_path(): return SNAPSHOTS_DIR.parent / "tmp" +@pytest.fixture +def local_tmp_project_path(local_tmp_path: Path): + project_path = SNAPSHOTS_DIR.parent / "pytest-project" + project_path.mkdir(exist_ok=True) + return project_path + + @pytest.fixture def cdf_tool_config(cognite_client_approval: CogniteClient, monkeypatch: MonkeyPatch) -> CDFToolConfig: monkeypatch.setenv("CDF_PROJECT", "pytest-project") @@ -99,16 +104,28 @@ def fake_read_yaml_files( name: str | None = None, ) -> dict[str, Any]: if name == "local.yaml": - return {"test": {"project": "pytest-project", "type": "dev", "deploy": [module_path.name]}} + return {"dev": {"project": "pytest-project", "type": "dev", "deploy": [module_path.name]}} return read_yaml_files(yaml_dirs, name) monkeypatch.setattr("cognite_toolkit.cdf_tk.templates.read_yaml_files", fake_read_yaml_files) +def mock_read_yaml_file(module_path: Path, monkeypatch: MonkeyPatch) -> None: + def fake_read_yaml_file( + filepath: Path, expected_output: Literal["list", "dict"] = "dict" + ) 
-> dict[str, Any] | list[dict[str, Any]]: + if filepath.name == "environments.yaml": + return {"dev": {"project": "pytest-project", "type": "dev", "deploy": [module_path.name]}} + return read_yaml_file(filepath, expected_output) + + monkeypatch.setattr("cognite_toolkit.cdf_tk.templates.read_yaml_file", fake_read_yaml_file) + + @pytest.mark.parametrize("module_path", list(find_all_modules())) def test_deploy_module_approval( module_path: Path, local_tmp_path: Path, + local_tmp_project_path: Path, monkeypatch: MonkeyPatch, cognite_client_approval: CogniteClient, cdf_tool_config: CDFToolConfig, @@ -116,17 +133,29 @@ def test_deploy_module_approval( data_regression, ) -> None: mock_read_yaml_files(module_path, monkeypatch) + mock_read_yaml_file(module_path, monkeypatch) + + main_init( + typer_context, + dry_run=False, + upgrade=False, + git=None, + init_dir=str(local_tmp_project_path), + no_backup=True, + clean=True, + ) + build( typer_context, - source_dir="./cognite_toolkit", + source_dir=str(local_tmp_project_path), build_dir=str(local_tmp_path), - build_env="test", + build_env="dev", clean=True, ) deploy( typer_context, build_dir=str(local_tmp_path), - build_env="test", + build_env="dev", interactive=False, drop=True, dry_run=False, @@ -141,6 +170,7 @@ def test_deploy_module_approval( def test_clean_module_approval( module_path: Path, local_tmp_path: Path, + local_tmp_project_path: Path, monkeypatch: MonkeyPatch, cognite_client_approval: CogniteClient, cdf_tool_config: CDFToolConfig, @@ -148,11 +178,23 @@ def test_clean_module_approval( data_regression, ) -> None: mock_read_yaml_files(module_path, monkeypatch) + mock_read_yaml_file(module_path, monkeypatch) + + main_init( + typer_context, + dry_run=False, + upgrade=False, + git=None, + init_dir=str(local_tmp_project_path), + no_backup=True, + clean=True, + ) + build( typer_context, - source_dir="./cognite_toolkit", + source_dir=str(local_tmp_project_path), build_dir=str(local_tmp_path), - build_env="test", + build_env="dev", clean=True, ) clean( diff --git a/tests/test_versions.py b/tests/test_build.py similarity index 83% rename from tests/test_versions.py rename to tests/test_build.py index 97b59cf7a..900bccc77 100644 --- a/tests/test_versions.py +++ b/tests/test_build.py @@ -5,9 +5,11 @@ from re import Match import pytest +import yaml from packaging.version import Version from cognite_toolkit._version import __template_version__, __version__ +from cognite_toolkit.cdf_tk.templates import generate_config from tests.constants import REPO_ROOT if sys.version_info >= (3, 11): @@ -71,6 +73,15 @@ def test_changelog_entry_date(changelog_name: str) -> None: assert True +def test_config_yaml_updated() -> None: + config_yaml = yaml.safe_load((REPO_ROOT / "cognite_toolkit" / "config.yaml").read_text(encoding="utf-8")) + expected_config = yaml.safe_load(generate_config(REPO_ROOT / "cognite_toolkit")[0]) + assert config_yaml == expected_config, ( + "The 'config.yaml' file is not up to date with the latest changes. " + "Please run 'python -m cognite_toolkit.cdf_tk.templates' to update it." 
+ ) + + def _parse_changelog(changelog: str) -> Iterator[Match[str]]: changelog = (REPO_ROOT / changelog).read_text(encoding="utf-8") return re.finditer(r"##\s\[(\d+\.\d+\.\d+(a\d+)?)\]\s-\s(\d+-\d+-\d+)", changelog) diff --git a/tests/test_cdf_tk/project_configs/cognite_modules/a_module/default.config.yaml b/tests/test_cdf_tk/project_configs/cognite_modules/a_module/default.config.yaml new file mode 100644 index 000000000..d73ee2d25 --- /dev/null +++ b/tests/test_cdf_tk/project_configs/cognite_modules/a_module/default.config.yaml @@ -0,0 +1,2 @@ +readwrite_source_id: +readonly_source_id: diff --git a/tests/test_cdf_tk/project_configs/cognite_modules/another_module/default.config.yaml b/tests/test_cdf_tk/project_configs/cognite_modules/another_module/default.config.yaml new file mode 100644 index 000000000..b6916ea7b --- /dev/null +++ b/tests/test_cdf_tk/project_configs/cognite_modules/another_module/default.config.yaml @@ -0,0 +1,5 @@ +default_location: oid +source_asset: workmate +source_workorder: workmate +source_files: fileshare +source_timeseries: pi diff --git a/tests/test_cdf_tk/project_configs/cognite_modules/default.config.yaml b/tests/test_cdf_tk/project_configs/cognite_modules/default.config.yaml new file mode 100644 index 000000000..f7fbbf118 --- /dev/null +++ b/tests/test_cdf_tk/project_configs/cognite_modules/default.config.yaml @@ -0,0 +1,2 @@ +top_variable: + diff --git a/tests/test_cdf_tk/project_configs/cognite_modules/parent_module/child_module/default.config.yaml b/tests/test_cdf_tk/project_configs/cognite_modules/parent_module/child_module/default.config.yaml new file mode 100644 index 000000000..d4bb0cf32 --- /dev/null +++ b/tests/test_cdf_tk/project_configs/cognite_modules/parent_module/child_module/default.config.yaml @@ -0,0 +1 @@ +child_variable: diff --git a/tests/test_cdf_tk/test_templates.py b/tests/test_cdf_tk/test_templates.py new file mode 100644 index 000000000..d158b6e91 --- /dev/null +++ b/tests/test_cdf_tk/test_templates.py @@ -0,0 +1,87 @@ +from __future__ import annotations + +from pathlib import Path +from typing import Any + +import pytest +import yaml + +from cognite_toolkit.cdf_tk.templates import COGNITE_MODULES, create_local_config, generate_config, split_config + +BUILD_CONFIG = Path(__file__).parent / "project_configs" + + +def generate_config_test_cases(): + expected = { + COGNITE_MODULES: { + "a_module": { + "readwrite_source_id": "", + "readonly_source_id": "", + }, + "another_module": { + "default_location": "oid", + "source_asset": "workmate", + "source_workorder": "workmate", + "source_files": "fileshare", + "source_timeseries": "pi", + }, + "top_variable": "", + "parent_module": {"child_module": {"child_variable": ""}}, + }, + } + + yield pytest.param(yaml.safe_dump(expected, sort_keys=False), None, id="Include all") + + only_a_module = { + COGNITE_MODULES: { + "a_module": { + "readwrite_source_id": "", + "readonly_source_id": "", + }, + } + } + yield pytest.param(yaml.safe_dump(only_a_module, sort_keys=False), {"a_module"}, id="Include one module") + + +@pytest.mark.parametrize( + "expected, include", + list(generate_config_test_cases()), +) +def test_generate_config(expected: str, include: set[str] | None) -> None: + actual, _ = generate_config(BUILD_CONFIG, include_modules=include) + + assert actual == expected + + +@pytest.fixture() +def my_config(): + return { + "top_variable": "my_top_variable", + "module_a": { + "readwrite_source_id": "my_readwrite_source_id", + "readonly_source_id": "my_readonly_source_id", + }, + "parent": 
{"child": {"child_variable": "my_child_variable"}}, + } + + +def test_split_config(my_config: dict[str, Any]) -> None: + expected = { + "": {"top_variable": "my_top_variable"}, + "module_a": { + "readwrite_source_id": "my_readwrite_source_id", + "readonly_source_id": "my_readonly_source_id", + }, + "parent.child": {"child_variable": "my_child_variable"}, + } + actual = split_config(my_config) + + assert actual == expected + + +def test_create_local_config(my_config: dict[str, Any]): + configs = split_config(my_config) + + local_config = create_local_config(configs, Path("parent/child/auth/")) + + assert dict(local_config.items()) == {"top_variable": "my_top_variable", "child_variable": "my_child_variable"} From b2d2cc8a9f349ba18af35ed2763597dfbabccced Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Mon, 11 Dec 2023 16:01:48 +0100 Subject: [PATCH 25/90] moved valhall into correct dir --- .../auth/asset.extractor.groups.yaml | 0 .../auth/asset.processing.groups.yaml | 0 .../cdf_data_pipeline_asset_valhall/auth/asset.read.groups.yaml | 0 .../cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml | 0 .../examples/cdf_data_pipeline_asset_valhall/default.config.yaml | 0 .../source_asset_valhall_workmate.config.yaml | 0 .../extraction_pipelines/source_asset_valhall_workmate.yaml | 0 .../transformations/tr_asset_oid_workmate_asset_hierarchy.sql | 0 .../transformations/tr_asset_oid_workmate_asset_hierarchy.yaml | 0 9 files changed, 0 insertions(+), 0 deletions(-) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_data_pipeline_asset_valhall/auth/asset.read.groups.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_data_pipeline_asset_valhall/default.config.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.sql (100%) rename cognite_toolkit/{ => cognite_modules}/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml (100%) diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/auth/asset.extractor.groups.yaml diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml rename to 
cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/auth/asset.processing.groups.yaml diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.read.groups.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/auth/asset.read.groups.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/auth/asset.read.groups.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/auth/asset.read.groups.yaml diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/default.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/default.config.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.sql b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.sql similarity index 100% rename from cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.sql rename to cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.sql diff --git a/cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml similarity index 100% rename from cognite_toolkit/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml rename to 
cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml From b5269b68f5957d936e8b5ba7ed25b587882fa65b Mon Sep 17 00:00:00 2001 From: Greger Wedel Date: Mon, 11 Dec 2023 16:17:46 +0100 Subject: [PATCH 26/90] CDF-20390 replace fixed schedule with variable --- ...rkmate_apm_simple_load_asset2children.yaml | 3 +- ...t_oid_workmate_apm_simple_load_assets.yaml | 3 +- ..._pi_apm_simple_load_timeseries2assets.yaml | 3 +- ...id_workmate_apm_simple_load_workitems.yaml | 3 +- ...mate_apm_simple_load_workitems2assets.yaml | 3 +- ..._apm_simple_load_workitems2workorders.yaml | 3 +- ...d_workmate_apm_simple_load_workorders.yaml | 3 +- ...ate_apm_simple_load_workorders2assets.yaml | 3 +- ...tr_asset_oid_workmate_asset_hierarchy.yaml | 3 +- ...asset_hierarchy-load-collections_pump.yaml | 3 +- ...sset_hierarchy_cdf_asset_source_model.yaml | 3 +- ...del-populate-lift_station_pumps_edges.yaml | 3 +- .../pump_model-populate-pump_container.yaml | 3 +- ...c_asset_parents_from_hierarchy_to_apm.yaml | 3 +- ...eld_sync_assets_from_hierarchy_to_apm.yaml | 3 +- ...eld_sync_workorders_to_apm_activities.yaml | 3 +- default.config.yaml | 6 ++++ .../cdf_apm_simple.yaml | 36 +++++++++---------- .../cdf_apm_simple_data_model.yaml | 32 ++++++++--------- .../cdf_asset_source_model.yaml | 4 +-- .../cdf_infield_location.yaml | 12 +++---- .../cdf_oid_example_data.yaml | 4 +-- .../example_pump_asset_hierarchy.yaml | 4 +-- .../example_pump_data_model.yaml | 8 ++--- 24 files changed, 72 insertions(+), 82 deletions(-) diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml index 04698437b..d17a4aade 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml @@ -24,6 +24,5 @@ authentication: # Optional: If idP requires providing the cicd_audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml index 76f4a2f49..a8e28de70 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml @@ -23,6 +23,5 @@ authentication: # Optional: If idP requires providing the cicd_audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true \ No newline at end of file
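Annotation (not part of the patch): the '{{scheduleHourly}}' placeholders introduced by this commit are resolved from the config variables at build time, so the built transformation YAML once again contains a concrete cron string. A rough sketch of that substitution step, using a hypothetical helper name; the toolkit's real implementation may differ:

import re

def inject_variables(text: str, variables: dict[str, str]) -> str:
    # Replace each {{name}} placeholder with its configured value.
    return re.sub(r"\{\{\s*(\w+)\s*\}\}", lambda m: variables[m.group(1)], text)

source = "schedule:\n  interval: '{{scheduleHourly}}'\n  isPaused: true\n"
print(inject_variables(source, {"scheduleHourly": "7 * * * *"}))
# schedule:
#   interval: '7 * * * *'
#   isPaused: true
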
diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml index 71419741e..21a9cb6c8 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml @@ -23,6 +23,5 @@ authentication: # Optional: If idP requires providing the cicd_audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml index 602f89dd6..b0b9b1bd8 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml @@ -23,6 +23,5 @@ authentication: # Optional: If idP requires providing the cicd_audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml index 1acf79e18..71963b2dc 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml @@ -24,6 +24,5 @@ authentication: # Optional: If idP requires providing the cicd_audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml index 100098009..04bb81979 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml @@ -24,6 +24,5 @@ authentication: # Optional: If idP requires providing the cicd_audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml
b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml index 187055c89..2c2398714 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml @@ -23,6 +23,5 @@ authentication: # Optional: If idP requires providing the cicd_audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml index b2d4ce959..2348af72d 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml @@ -24,6 +24,5 @@ authentication: # Optional: If idP requires providing the cicd_audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml index 8606b2361..a26d32e70 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml @@ -17,6 +17,5 @@ authentication: # Optional: If idP requires providing the cicd_audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: false \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml index 5eedb9e52..78b0c8bdc 100644 --- a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml +++ b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml @@ -15,6 +15,5 @@ authentication: # Optional: If idP requires providing the audience audience: {{audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true diff --git a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml index 5085ed241..0c38c60af 100644 --- 
a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml +++ b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml @@ -21,6 +21,5 @@ authentication: # Optional: If idP requires providing the audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml index 8415282f0..b1f0a2cfd 100644 --- a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml @@ -22,6 +22,5 @@ authentication: # Optional: If idP requires providing the audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml index 75f20fed2..0d3987ac7 100644 --- a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml @@ -21,6 +21,5 @@ authentication: # Optional: If idP requires providing the audience audience: {{cicd_audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: true diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml index 634acb433..526d95408 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml @@ -23,6 +23,5 @@ authentication: # Optional: If idP requires providing the audience audience: {{audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: false diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml index dc0d27fc0..6e0bc0b2a 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml +++ 
b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml @@ -23,6 +23,5 @@ authentication: # Optional: If idP requires providing the audience audience: {{audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: false \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml index 60590b38d..a224f2a77 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml @@ -24,6 +24,5 @@ authentication: # Optional: If idP requires providing the audience audience: {{audience}} schedule: - # every hour - interval: '0 * * * *' + interval: '{{scheduleHourly}}' isPaused: false diff --git a/default.config.yaml b/default.config.yaml index 0e467275a..ce3c0a5aa 100644 --- a/default.config.yaml +++ b/default.config.yaml @@ -7,3 +7,9 @@ cicd_scopes: - ${IDP_SCOPES} # Optional: If idP requires providing the audience cicd_audience: ${IDP_AUDIENCE} +# Seven minutes past each hour +scheduleHourly: '7 * * * *' +# Every fifteen minutes +scheduleQuarterly: '0/15 * * * *' +# Daily at 1:35 AM +scheduleDaily: '35 1 * * *' \ No newline at end of file diff --git a/tests/test_approval_modules_snapshots/cdf_apm_simple.yaml b/tests/test_approval_modules_snapshots/cdf_apm_simple.yaml index 173033210..f1e921d33 100644 --- a/tests/test_approval_modules_snapshots/cdf_apm_simple.yaml +++ b/tests/test_approval_modules_snapshots/cdf_apm_simple.yaml @@ -892,7 +892,7 @@ Transformation: from\n `apm_simple`.`assets`;\n" schedule: externalId: apm_simple-load-asset-hierarchy - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -928,7 +928,7 @@ Transformation: \ as endNode\nfrom\n `apm_simple`.`asset2children`;\n" schedule: externalId: apm_simple-load-asset2children - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -969,7 +969,7 @@ Transformation: \ as BOOLEAN) as isActive\nfrom\n `apm_simple`.`assets`;\n" schedule: externalId: apm_simple-load-assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1004,7 +1004,7 @@ Transformation: \ metrics\nfrom\n `apm_simple`.`timeseries2assets`;\n" schedule: externalId: apm_simple-load-timeseries2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1043,7 +1043,7 @@ Transformation: \ method,\n cast(`isCompleted` as BOOLEAN) as isCompleted\nfrom\n `apm_simple`.`workitems`;\n" schedule: externalId: apm_simple-load-workitems - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1085,7 +1085,7 @@ Transformation: from\n `apm_simple`.`workitem2assets`;\n" schedule: externalId: apm_simple-load-workitems2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true 
sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1127,7 +1127,7 @@ Transformation: from\n `apm_simple`.`workorder2items`;\n" schedule: externalId: apm_simple-load-workitems2workorders - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1173,7 +1173,7 @@ Transformation: \ STRING) as programNumber \nfrom `apm_simple`.`workorders`;\n" schedule: externalId: apm_simple-load-workorders - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1209,7 +1209,7 @@ Transformation: \ as endNode\nfrom\n `apm_simple`.`workorder2assets`;\n" schedule: externalId: apm_simple-load-workorders2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1220,31 +1220,31 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: apm_simple-load-asset-hierarchy - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: apm_simple-load-asset2children - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: apm_simple-load-assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: apm_simple-load-timeseries2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: apm_simple-load-workitems - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: apm_simple-load-workitems2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: apm_simple-load-workitems2workorders - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: apm_simple-load-workorders - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: apm_simple-load-workorders2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true View: - externalId: Asset diff --git a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml index dff37bfe0..2797dab5d 100644 --- a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml @@ -358,7 +358,7 @@ Transformation: \ as endNode\nfrom\n `asset_oid_workmate`.`asset2children`;\n" schedule: externalId: tr_asset_oid_workmate_apm_simple_load_asset2children - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -399,7 +399,7 @@ Transformation: \ as BOOLEAN) as isActive\nfrom\n `asset_oid_workmate`.`assets`;\n" schedule: externalId: tr_asset_oid_workmate_apm_simple_load_assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -434,7 +434,7 @@ Transformation: \ metrics\nfrom\n `files_oid_{{source_files}}`.`timeseries2assets`;\n" schedule: externalId: tr_timeseries_oid_pi_apm_simple_load_timeseries2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -473,7 +473,7 @@ Transformation: \ method,\n cast(`isCompleted` as BOOLEAN) as isCompleted\nfrom\n `workorder_oid_workmate`.`workitems`;\n" schedule: externalId: tr_workorder_oid_workmate_apm_simple_load_workitems - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: 
${IDP_AUDIENCE} @@ -515,7 +515,7 @@ Transformation: from\n `workorder_oid_workmate`.`workitem2assets`;\n" schedule: externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -557,7 +557,7 @@ Transformation: from\n `workorder_oid_workmate`.`workorder2items`;\n" schedule: externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -603,7 +603,7 @@ Transformation: \ STRING) as programNumber \nfrom `workorder_oid_workmate`.`workorders`;\n" schedule: externalId: tr_workorder_oid_workmate_apm_simple_load_workorders - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -639,7 +639,7 @@ Transformation: \ as endNode\nfrom\n `workorder_oid_workmate`.`workorder2assets`;\n" schedule: externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -650,28 +650,28 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: tr_asset_oid_workmate_apm_simple_load_asset2children - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: tr_asset_oid_workmate_apm_simple_load_assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: tr_timeseries_oid_pi_apm_simple_load_timeseries2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true View: - externalId: Asset diff --git a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml index 6304233dc..ed459152e 100644 --- a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml @@ -78,7 +78,7 @@ Transformation: \ 'Pump');\n" schedule: externalId: sync-asset_hierarchy_cdf_asset_source_model - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -89,7 +89,7 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: sync-asset_hierarchy_cdf_asset_source_model - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true View: - externalId: Asset diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml index 7a6c719ed..be17db43f 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml +++ b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml 
@@ -398,7 +398,7 @@ Transformation: \ = rootAsset.id" schedule: externalId: tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -435,7 +435,7 @@ Transformation: \ join cdf_assetSubtree('WMT:VAL') as rootAsset on asset.rootId = rootAsset.id" schedule: externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -475,7 +475,7 @@ Transformation: \ 'workmate' as source\n from\n `workorder_oid_workmate`.`workorders`;\n" schedule: externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -486,13 +486,13 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: false - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: false - externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: false deleted: Transformation: diff --git a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml index 5bb37b8ca..154de1788 100644 --- a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml +++ b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml @@ -688,7 +688,7 @@ Transformation: from\n `asset_oid_workmate`.`assets`;\n" schedule: externalId: tr_asset_oid_workmate_asset_hierarchy - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -699,7 +699,7 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: tr_asset_oid_workmate_asset_hierarchy - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: false deleted: FileMetadata: diff --git a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml index ce142bc14..da1e90bef 100644 --- a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml @@ -78,7 +78,7 @@ Transformation: \ LifeCycleStatus,\n LocationDescription\n ) as metadata\nfrom pump_assets.`collections_pump`\n" schedule: externalId: pump_asset_hierarchy-load-collections_pump - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -89,7 +89,7 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: pump_asset_hierarchy-load-collections_pump - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true deleted: Table: diff --git a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml index 35ac3435f..11c9e5354 100644 --- a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml @@ -93,7 +93,7 @@ Transformation: , 
\"ExtendedSourceData\", \"1\", \"Asset\")\nwhere\n startswith(title, 'Pump')\n" schedule: externalId: pump_model-populate-lift_station_pumps_edges - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -133,7 +133,7 @@ Transformation: \ startswith(title, 'Pump')\n" schedule: externalId: pump_model-populate-pump_container - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -144,10 +144,10 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: pump_model-populate-lift_station_pumps_edges - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true - externalId: pump_model-populate-pump_container - interval: 0 * * * * + interval: '{{scheduleHourly}}' isPaused: true View: - externalId: LiftStation From 87c72863a15df5d01184bc3d302a428320560a6a Mon Sep 17 00:00:00 2001 From: Greger Wedel Date: Mon, 11 Dec 2023 16:44:07 +0100 Subject: [PATCH 27/90] CDF-20431 and CDF-20390 clean up transformations --- .../cognite_modules/default.config.yaml | 6 ++++ .../default.config.yaml | 8 +---- ...rkmate_apm_simple_load_asset2children.yaml | 2 +- ...t_oid_workmate_apm_simple_load_assets.yaml | 2 +- ..._pi_apm_simple_load_timeseries2assets.yaml | 2 +- ...id_workmate_apm_simple_load_workitems.yaml | 2 +- ...mate_apm_simple_load_workitems2assets.yaml | 2 +- ..._apm_simple_load_workitems2workorders.yaml | 2 +- ...d_workmate_apm_simple_load_workorders.yaml | 2 +- ...ate_apm_simple_load_workorders2assets.yaml | 2 +- .../default.config.yaml | 9 ----- ...asset_hierarchy-load-collections_pump.yaml | 10 +++--- .../cdf_infield_location/default.config.yaml | 10 ------ ...c_asset_parents_from_hierarchy_to_apm.yaml | 11 +++--- ...eld_sync_assets_from_hierarchy_to_apm.yaml | 11 +++--- ...eld_sync_workorders_to_apm_activities.yaml | 11 +++--- cognite_toolkit/config.yaml | 21 +++-------- default.config.yaml | 15 -------- .../cdf_apm_simple.yaml | 36 +++++++++---------- .../cdf_apm_simple_data_model.yaml | 32 ++++++++--------- .../cdf_asset_source_model.yaml | 4 +-- .../cdf_infield_location.yaml | 12 +++---- .../cdf_oid_example_data.yaml | 4 +-- .../example_pump_asset_hierarchy.yaml | 4 +-- .../example_pump_data_model.yaml | 8 ++--- 25 files changed, 89 insertions(+), 139 deletions(-) delete mode 100644 default.config.yaml diff --git a/cognite_toolkit/cognite_modules/default.config.yaml b/cognite_toolkit/cognite_modules/default.config.yaml index 3552617bd..01e39e685 100644 --- a/cognite_toolkit/cognite_modules/default.config.yaml +++ b/cognite_toolkit/cognite_modules/default.config.yaml @@ -12,3 +12,9 @@ cicd_scopes: - ${IDP_SCOPES} # Optional: If idP requires providing the audience cicd_audience: ${IDP_AUDIENCE} +# Seven minutes past each hour +scheduleHourly: '7 * * * *' +# Every fifteen minutes +scheduleQuarterly: '0/15 * * * *' +# Daily at 1:35 AM +scheduleDaily: '35 1 * * *' diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml index 87152a6e4..879eeea59 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml @@ -11,10 +11,4 @@ datamodel_version: '1' view_Asset_version: '1' view_WorkOrder_version: '1' view_WorkItem_version: '1' -# In addition, the transformations are using the 
diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml index 87152a6e4..879eeea59 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml @@ -11,10 +11,4 @@ datamodel_version: '1' view_Asset_version: '1' view_WorkOrder_version: '1' view_WorkItem_version: '1' -# In addition, the transformations are using the following variables from the global scope (your root default.config.yaml): -#cicd_clientId -#cicd_clientSecret -#cicd_tokenUri -#cdfProjectName -#cicd_scopes -#cicd_audience \ No newline at end of file +pause_transformations: true diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml index d17a4aade..8015e18c2 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml @@ -25,4 +25,4 @@ authentication: audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' - isPaused: true \ No newline at end of file + isPaused: {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml index a8e28de70..e4c61b476 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml @@ -24,4 +24,4 @@ authentication: audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' - isPaused: true \ No newline at end of file + isPaused: {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml index 21a9cb6c8..659d984b2 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml @@ -24,4 +24,4 @@ authentication: audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' - isPaused: true \ No newline at end of file + isPaused: {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml index b0b9b1bd8..6354fa90f 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml @@ -24,4 +24,4 @@ authentication: audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' - isPaused: true \ No newline at end of file + isPaused: {{pause_transformations}} \ No newline at end
of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml index 71963b2dc..52c9aa386 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml @@ -25,4 +25,4 @@ authentication: audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' - isPaused: true \ No newline at end of file + isPaused: {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml index 04bb81979..b68e562f5 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml @@ -25,4 +25,4 @@ authentication: audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' - isPaused: true \ No newline at end of file + isPaused: {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml index 2c2398714..b3cf23827 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml @@ -24,4 +24,4 @@ authentication: audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' - isPaused: true \ No newline at end of file + isPaused: {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml index 2348af72d..32764fd2f 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml @@ -25,4 +25,4 @@ authentication: audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' - isPaused: true \ No newline at end of file + isPaused: {{pause_transformations}} \ No newline at end of file diff --git
a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/default.config.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/default.config.yaml index 60e9c401e..38b70c9f7 100644 --- a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/default.config.yaml +++ b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/default.config.yaml @@ -1,12 +1,3 @@ # Only valid for this module, loads template variables from environment raw_db: pump_assets data_set: src:lift_pump_stations -clientId: ${IDP_CLIENT_ID} -clientSecret: ${IDP_CLIENT_SECRET} -tokenUri: ${IDP_TOKEN_URL} -# Optional: If idP requires providing the scopes -cdfProjectName: ${CDF_PROJECT} -scopes: - - ${IDP_SCOPES} -# Optional: If idP requires providing the audience -audience: ${IDP_AUDIENCE} diff --git a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml index 78b0c8bdc..7d0b5fc45 100644 --- a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml +++ b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml @@ -6,14 +6,14 @@ destination: # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: - clientId: {{clientId}} - clientSecret: {{clientSecret}} - tokenUri: {{tokenUri}} + clientId: {{cicd_clientId}} + clientSecret: {{cicd_clientSecret}} + tokenUri: {{cicd_tokenUri}} # Optional: If idP requires providing the scopes cdfProjectName: {{cdfProjectName}} - scopes: {{scopes}} + scopes: {{cicd_scopes}} # Optional: If idP requires providing the audience - audience: {{audience}} + audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' isPaused: true diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/default.config.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/default.config.yaml index ebf70de50..97ed0e102 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/default.config.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/default.config.yaml @@ -25,13 +25,3 @@ infield_default_location_checklist_admin_users_source_id: 684237e9-c1fd-4d3c-8c5 infield_default_location_normal_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e infield_default_location_template_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e - -# Transformation credentials -clientId: ${IDP_CLIENT_ID} -clientSecret: ${IDP_CLIENT_SECRET} -tokenUri: ${IDP_TOKEN_URL} -# Optional: If idP requires providing the scopes -cdfProjectName: ${CDF_PROJECT} -scopes: ${IDP_SCOPES} -# Optional: If idP requires providing the audience -audience: ${IDP_AUDIENCE} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml index 526d95408..5e90cbcae 100644 --- 
a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml @@ -13,15 +13,14 @@ action: upsert # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: - clientId: {{clientId}} - clientSecret: {{clientSecret}} - tokenUri: {{tokenUri}} + clientId: {{cicd_clientId}} + clientSecret: {{cicd_clientSecret}} + tokenUri: {{cicd_tokenUri}} # Optional: If idP requires providing the scopes cdfProjectName: {{cdfProjectName}} - scopes: - - {{scopes}} + scopes: {{cicd_scopes}} # Optional: If idP requires providing the audience - audience: {{audience}} + audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' isPaused: false diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml index 6e0bc0b2a..2a6e1308b 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml @@ -13,15 +13,14 @@ action: upsert # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: - clientId: {{clientId}} - clientSecret: {{clientSecret}} - tokenUri: {{tokenUri}} + clientId: {{cicd_clientId}} + clientSecret: {{cicd_clientSecret}} + tokenUri: {{cicd_tokenUri}} # Optional: If idP requires providing the scopes cdfProjectName: {{cdfProjectName}} - scopes: - - {{scopes}} + scopes: {{cicd_scopes}} # Optional: If idP requires providing the audience - audience: {{audience}} + audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' isPaused: false \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml index a224f2a77..6ba6fee82 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml @@ -14,15 +14,14 @@ action: upsert # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: - clientId: {{clientId}} - clientSecret: {{clientSecret}} - tokenUri: {{tokenUri}} + clientId: {{cicd_clientId}} + clientSecret: {{cicd_clientSecret}} + tokenUri: {{cicd_tokenUri}} # Optional: If idP requires providing the scopes cdfProjectName: {{cdfProjectName}} - scopes: - - {{scopes}} + scopes: {{cicd_scopes}} # Optional: If idP requires 
providing the audience - audience: {{audience}} + audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' isPaused: false diff --git a/cognite_toolkit/config.yaml b/cognite_toolkit/config.yaml index 42f66af00..fbb25c57d 100644 --- a/cognite_toolkit/config.yaml +++ b/cognite_toolkit/config.yaml @@ -25,6 +25,7 @@ cognite_modules: #cdfProjectName #cicd_scopes #cicd_audience + pause_transformations: true cdf_oid_example_data: # Only valid for this module, loads template variables from environment # @@ -47,15 +48,6 @@ cognite_modules: # Only valid for this module, loads template variables from environment raw_db: pump_assets data_set: src:lift_pump_stations - clientId: ${IDP_CLIENT_ID} - clientSecret: ${IDP_CLIENT_SECRET} - tokenUri: ${IDP_TOKEN_URL} - # Optional: If idP requires providing the scopes - cdfProjectName: ${CDF_PROJECT} - scopes: - - ${IDP_SCOPES} -# Optional: If idP requires providing the audience - audience: ${IDP_AUDIENCE} experimental: cdf_asset_source_model: # Only valid for this module, loads template variables from environment @@ -107,14 +99,6 @@ cognite_modules: infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e # Transformation credentials - clientId: ${IDP_CLIENT_ID} - clientSecret: ${IDP_CLIENT_SECRET} - tokenUri: ${IDP_TOKEN_URL} - # Optional: If idP requires providing the scopes - cdfProjectName: ${CDF_PROJECT} - scopes: ${IDP_SCOPES} - # Optional: If idP requires providing the audience - audience: ${IDP_AUDIENCE} cdf_cluster: ${CDF_CLUSTER} cicd_clientId: ${IDP_CLIENT_ID} cicd_clientSecret: ${IDP_CLIENT_SECRET} @@ -128,6 +112,9 @@ cognite_modules: cdf_apm_base: apm_datamodel_space: APM_SourceData apm_datamodel_version: '1' + scheduleHourly: 7 * * * * + scheduleQuarterly: 0/15 * * * * + scheduleDaily: 35 1 * * * custom_modules: my_example_module: example_variable: demo_dataset diff --git a/default.config.yaml b/default.config.yaml deleted file mode 100644 index ce3c0a5aa..000000000 --- a/default.config.yaml +++ /dev/null @@ -1,15 +0,0 @@ -cicd_clientId: ${IDP_CLIENT_ID} -cicd_clientSecret: ${IDP_CLIENT_SECRET} -cicd_tokenUri: ${IDP_TOKEN_URL} -# Optional: If idP requires providing the scopes -cdfProjectName: ${CDF_PROJECT} -cicd_scopes: - - ${IDP_SCOPES} -# Optional: If idP requires providing the audience -cicd_audience: ${IDP_AUDIENCE} -# Seven minutes past each hour -scheduleHourly: '7 * * * *' -# Every fifteen minutes -scheduleQuarterly: '0/15 * * * *' -# Daily at 1:35 AM -scheduleDaily: '35 1 * * *' \ No newline at end of file diff --git a/tests/test_approval_modules_snapshots/cdf_apm_simple.yaml b/tests/test_approval_modules_snapshots/cdf_apm_simple.yaml index f1e921d33..8095b5f7e 100644 --- a/tests/test_approval_modules_snapshots/cdf_apm_simple.yaml +++ b/tests/test_approval_modules_snapshots/cdf_apm_simple.yaml @@ -892,7 +892,7 @@ Transformation: from\n `apm_simple`.`assets`;\n" schedule: externalId: apm_simple-load-asset-hierarchy - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -928,7 +928,7 @@ Transformation: \ as endNode\nfrom\n `apm_simple`.`asset2children`;\n" schedule: externalId: apm_simple-load-asset2children - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -969,7 +969,7 @@ Transformation: \ as BOOLEAN) as isActive\nfrom\n `apm_simple`.`assets`;\n" schedule: externalId: apm_simple-load-assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true 
sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1004,7 +1004,7 @@ Transformation: \ metrics\nfrom\n `apm_simple`.`timeseries2assets`;\n" schedule: externalId: apm_simple-load-timeseries2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1043,7 +1043,7 @@ Transformation: \ method,\n cast(`isCompleted` as BOOLEAN) as isCompleted\nfrom\n `apm_simple`.`workitems`;\n" schedule: externalId: apm_simple-load-workitems - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1085,7 +1085,7 @@ Transformation: from\n `apm_simple`.`workitem2assets`;\n" schedule: externalId: apm_simple-load-workitems2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1127,7 +1127,7 @@ Transformation: from\n `apm_simple`.`workorder2items`;\n" schedule: externalId: apm_simple-load-workitems2workorders - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1173,7 +1173,7 @@ Transformation: \ STRING) as programNumber \nfrom `apm_simple`.`workorders`;\n" schedule: externalId: apm_simple-load-workorders - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1209,7 +1209,7 @@ Transformation: \ as endNode\nfrom\n `apm_simple`.`workorder2assets`;\n" schedule: externalId: apm_simple-load-workorders2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -1220,31 +1220,31 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: apm_simple-load-asset-hierarchy - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: apm_simple-load-asset2children - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: apm_simple-load-assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: apm_simple-load-timeseries2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: apm_simple-load-workitems - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: apm_simple-load-workitems2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: apm_simple-load-workitems2workorders - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: apm_simple-load-workorders - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: apm_simple-load-workorders2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true View: - externalId: Asset diff --git a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml index 2797dab5d..22bdc8b6f 100644 --- a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml @@ -358,7 +358,7 @@ Transformation: \ as endNode\nfrom\n `asset_oid_workmate`.`asset2children`;\n" schedule: externalId: tr_asset_oid_workmate_apm_simple_load_asset2children - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -399,7 +399,7 @@ Transformation: \ as BOOLEAN) as isActive\nfrom\n `asset_oid_workmate`.`assets`;\n" 
schedule: externalId: tr_asset_oid_workmate_apm_simple_load_assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -434,7 +434,7 @@ Transformation: \ metrics\nfrom\n `files_oid_{{source_files}}`.`timeseries2assets`;\n" schedule: externalId: tr_timeseries_oid_pi_apm_simple_load_timeseries2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -473,7 +473,7 @@ Transformation: \ method,\n cast(`isCompleted` as BOOLEAN) as isCompleted\nfrom\n `workorder_oid_workmate`.`workitems`;\n" schedule: externalId: tr_workorder_oid_workmate_apm_simple_load_workitems - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -515,7 +515,7 @@ Transformation: from\n `workorder_oid_workmate`.`workitem2assets`;\n" schedule: externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -557,7 +557,7 @@ Transformation: from\n `workorder_oid_workmate`.`workorder2items`;\n" schedule: externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -603,7 +603,7 @@ Transformation: \ STRING) as programNumber \nfrom `workorder_oid_workmate`.`workorders`;\n" schedule: externalId: tr_workorder_oid_workmate_apm_simple_load_workorders - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -639,7 +639,7 @@ Transformation: \ as endNode\nfrom\n `workorder_oid_workmate`.`workorder2assets`;\n" schedule: externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -650,28 +650,28 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: tr_asset_oid_workmate_apm_simple_load_asset2children - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: tr_asset_oid_workmate_apm_simple_load_assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: tr_timeseries_oid_pi_apm_simple_load_timeseries2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true View: - externalId: Asset diff --git a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml index ed459152e..12f72714c 100644 --- a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml +++ 
b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml @@ -78,7 +78,7 @@ Transformation: \ 'Pump');\n" schedule: externalId: sync-asset_hierarchy_cdf_asset_source_model - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -89,7 +89,7 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: sync-asset_hierarchy_cdf_asset_source_model - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true View: - externalId: Asset diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml index be17db43f..bb97bca62 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml +++ b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml @@ -398,7 +398,7 @@ Transformation: \ = rootAsset.id" schedule: externalId: tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -435,7 +435,7 @@ Transformation: \ join cdf_assetSubtree('WMT:VAL') as rootAsset on asset.rootId = rootAsset.id" schedule: externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -475,7 +475,7 @@ Transformation: \ 'workmate' as source\n from\n `workorder_oid_workmate`.`workorders`;\n" schedule: externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -486,13 +486,13 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: false - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: false - externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: false deleted: Transformation: diff --git a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml index 154de1788..aaf12b0fa 100644 --- a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml +++ b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml @@ -688,7 +688,7 @@ Transformation: from\n `asset_oid_workmate`.`assets`;\n" schedule: externalId: tr_asset_oid_workmate_asset_hierarchy - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -699,7 +699,7 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: tr_asset_oid_workmate_asset_hierarchy - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: false deleted: FileMetadata: diff --git a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml index da1e90bef..4c948107f 100644 --- a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml @@ -78,7 +78,7 @@ Transformation: \ LifeCycleStatus,\n 
LocationDescription\n ) as metadata\nfrom pump_assets.`collections_pump`\n" schedule: externalId: pump_asset_hierarchy-load-collections_pump - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -89,7 +89,7 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: pump_asset_hierarchy-load-collections_pump - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true deleted: Table: diff --git a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml index 11c9e5354..3efb1229a 100644 --- a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml @@ -93,7 +93,7 @@ Transformation: , \"ExtendedSourceData\", \"1\", \"Asset\")\nwhere\n startswith(title, 'Pump')\n" schedule: externalId: pump_model-populate-lift_station_pumps_edges - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -133,7 +133,7 @@ Transformation: \ startswith(title, 'Pump')\n" schedule: externalId: pump_model-populate-pump_container - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} @@ -144,10 +144,10 @@ Transformation: tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: - externalId: pump_model-populate-lift_station_pumps_edges - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true - externalId: pump_model-populate-pump_container - interval: '{{scheduleHourly}}' + interval: 7 * * * * isPaused: true View: - externalId: LiftStation From 4e9739e04f2d9de9cdcf0f9608c7d4be5521fe3a Mon Sep 17 00:00:00 2001 From: Greger Wedel Date: Mon, 11 Dec 2023 16:55:07 +0100 Subject: [PATCH 28/90] Update changelog --- CHANGELOG.templates.md | 13 ++++++++++++- cognite_toolkit/config.yaml | 7 ------- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index 44c727114..101c30797 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -13,15 +13,26 @@ Changes are grouped as follows: - `Removed` for now removed features. - `Fixed` for any bug fixes. - `Security` in case of vulnerabilities. -- + ## TBD - 2023-12-TBD + ### Added + - Explicitly define model `space` in `experimental/cdf_asset_source_model/` and `experimental/example_pump_model/`. - The module `my_example_module` has been added to the `custom_modules` folder. +- Added globally defined schedule variables that can be used across all modules. + ### Changed + - All cognite templates have been moved into `cognite_templates` folder, while `local_templates` is renamed to `custom_templates`. - Move cdf_apm_base into separate folder. - The file `local.yaml` has been renamed `environments.yaml` to better reflect its purpose. +- Changed the isPaused flag to use a module-level variable instead of being hardcoded in cdf_apm_simple_data_model. + +### Fixed + +- Removed transformation identity provider variables from modules and reused the global cicd_-prefixed ones.
+ ## [0.2.0] - 2023-12-01 ### Changed diff --git a/cognite_toolkit/config.yaml b/cognite_toolkit/config.yaml index fbb25c57d..b28936215 100644 --- a/cognite_toolkit/config.yaml +++ b/cognite_toolkit/config.yaml @@ -18,13 +18,6 @@ cognite_modules: view_Asset_version: '1' view_WorkOrder_version: '1' view_WorkItem_version: '1' - # In addition, the transformations are using the following variables from the global scope (your root default.config.yaml): - #cicd_clientId - #cicd_clientSecret - #cicd_tokenUri - #cdfProjectName - #cicd_scopes - #cicd_audience pause_transformations: true cdf_oid_example_data: # Only valid for this module, loads template variables from environment From 82ff72ac89371dff5ce680b499cc71fe4978f6c7 Mon Sep 17 00:00:00 2001 From: Anders Albert <60234212+doctrino@users.noreply.github.com> Date: Mon, 11 Dec 2023 17:09:44 +0100 Subject: [PATCH 29/90] [CDF-20450] Demo variables set in single config. (#207) * refactor: Updated demo variables * refactor: missing config file * fix: Removed source ids * tests: fix test data * build: changelog --- .github/workflows/build.yml | 2 +- .github/workflows/demo.yml | 2 +- CHANGELOG.templates.md | 1 + .../cdf_infield_location/default.config.yaml | 8 +++--- cognite_toolkit/config.yaml | 8 +++--- demo/README.md | 2 +- demo/config.cdf_auth_readwrite_all.yaml | 2 -- demo/config.cdf_infield_common.yaml | 1 - demo/config.yaml | 7 ++++++ demo/{local.yaml => environments.yaml} | 0 demo/preproc.py | 25 +++++++++++++++++++ demo/preproc.sh | 10 -------- .../cdf_infield_location.yaml | 8 +++--- 13 files changed, 48 insertions(+), 28 deletions(-) delete mode 100644 demo/config.cdf_auth_readwrite_all.yaml delete mode 100644 demo/config.cdf_infield_common.yaml create mode 100644 demo/config.yaml rename demo/{local.yaml => environments.yaml} (100%) create mode 100644 demo/preproc.py delete mode 100755 demo/preproc.sh diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 7a8b7f417..edddd5dc9 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -68,7 +68,7 @@ jobs: - name: Initialize project run: cdf-tk init demo_project - name: "Pre-processing for demo environment" - run: ./demo/preproc.sh + run: python ./demo/preproc.py - name: "Build the templates" run: cdf-tk build --build-dir=./build --env=demo ./demo_project - name: "Verify and create access rights" diff --git a/.github/workflows/demo.yml b/.github/workflows/demo.yml index ff967fd5e..69b20f9bc 100644 --- a/.github/workflows/demo.yml +++ b/.github/workflows/demo.yml @@ -34,7 +34,7 @@ jobs: - name: Initialize project run: cdf-tk init demo_project - name: "Pre-processing for demo environment" - run: ./demo/preproc.sh + run: python ./demo/preproc.py - name: "Build the templates" run: cdf-tk build --build-dir=./build --env=demo ./demo_project # be careful, this works as promised diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index 44c727114..25848f251 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -22,6 +22,7 @@ Changes are grouped as follows: - All cognite templates have been moved into `cognite_templates` folder, while `local_templates` is renamed to `custom_templates`. - Move cdf_apm_base into separate folder. - The file `local.yaml` has been renamed `environments.yaml` to better reflect its purpose. +- Removed demo `sourceId` from `cdf_infield_location` module. 
## [0.2.0] - 2023-12-01 ### Changed diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/default.config.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/default.config.yaml index ebf70de50..902b5f429 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/default.config.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/default.config.yaml @@ -21,10 +21,10 @@ root_asset_external_id: 'WMT:VAL' # the following properties are required for # infield and must be updated for each location -infield_default_location_checklist_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f -infield_default_location_normal_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e -infield_default_location_template_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f -infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e +infield_default_location_checklist_admin_users_source_id: <change_me> +infield_default_location_normal_users_source_id: <change_me> +infield_default_location_template_admin_users_source_id: <change_me> +infield_default_location_viewer_users_source_id: <change_me> # Transformation credentials clientId: ${IDP_CLIENT_ID} diff --git a/cognite_toolkit/config.yaml b/cognite_toolkit/config.yaml index 42f66af00..fee89324a 100644 --- a/cognite_toolkit/config.yaml +++ b/cognite_toolkit/config.yaml @@ -101,10 +101,10 @@ cognite_modules: # the following properties are required for # infield and must be updated for each location - infield_default_location_checklist_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f - infield_default_location_normal_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e - infield_default_location_template_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f - infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e + infield_default_location_checklist_admin_users_source_id: <change_me> + infield_default_location_normal_users_source_id: <change_me> + infield_default_location_template_admin_users_source_id: <change_me> + infield_default_location_viewer_users_source_id: <change_me> # Transformation credentials clientId: ${IDP_CLIENT_ID} diff --git a/demo/README.md b/demo/README.md index 5459432f0..7dacf1635 100644 --- a/demo/README.md +++ b/demo/README.md @@ -1,6 +1,6 @@ # demo directory -This directory contains configuration files and a preproc.sh script used +This directory contains configuration files and a preproc.py script used as part of deploying the default demo to a Cognite-internal demo project. You can delete this directory if you are not using the demo functionality for your own purposes.
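For reference, the demo deployment described in this README follows the sequence already wired into the build.yml and demo.yml workflows; a minimal local run (a sketch assuming the same repository layout as those workflows, not a step the README itself prescribes) would be:

    cdf-tk init demo_project
    python ./demo/preproc.py
    cdf-tk build --build-dir=./build --env=demo ./demo_project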
diff --git a/demo/config.cdf_auth_readwrite_all.yaml b/demo/config.cdf_auth_readwrite_all.yaml deleted file mode 100644 index a23f4dacb..000000000 --- a/demo/config.cdf_auth_readwrite_all.yaml +++ /dev/null @@ -1,2 +0,0 @@ -readwrite_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f -readonly_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e \ No newline at end of file diff --git a/demo/config.cdf_infield_common.yaml b/demo/config.cdf_infield_common.yaml deleted file mode 100644 index ac58ccfa3..000000000 --- a/demo/config.cdf_infield_common.yaml +++ /dev/null @@ -1 +0,0 @@ -applicationsconfiguration_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e diff --git a/demo/config.yaml b/demo/config.yaml new file mode 100644 index 000000000..4374754cf --- /dev/null +++ b/demo/config.yaml @@ -0,0 +1,7 @@ +readwrite_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f +readonly_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e +applicationsconfiguration_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e +infield_default_location_checklist_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f +infield_default_location_normal_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e +infield_default_location_template_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f +infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e diff --git a/demo/local.yaml b/demo/environments.yaml similarity index 100% rename from demo/local.yaml rename to demo/environments.yaml diff --git a/demo/preproc.py b/demo/preproc.py new file mode 100644 index 000000000..63b86fc3a --- /dev/null +++ b/demo/preproc.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python +import shutil +from pathlib import Path + +import yaml + +THIS_FOLDER = Path(__file__).parent.absolute() +DEMO_PROJECT = THIS_FOLDER.parent / "demo_project" + + +def run() -> None: + print("Running copy commands to prep deployment of demo...") + print("Copying my environments.yaml to root of repo...") + shutil.copy(THIS_FOLDER / "environments.yaml", DEMO_PROJECT) + print("Copying config.yaml into demo project...") + config_yaml_path = DEMO_PROJECT / "config.yaml" + variables = yaml.safe_load((THIS_FOLDER / "config.yaml").read_text()) + config_yaml = config_yaml_path.read_text() + for key, value in variables.items(): + config_yaml = config_yaml.replace(f"{key}: <change_me>", f"{key}: {value}") + config_yaml_path.write_text(config_yaml) + + +if __name__ == "__main__": + run() diff --git a/demo/preproc.sh b/demo/preproc.sh deleted file mode 100755 index a67104506..000000000 --- a/demo/preproc.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -echo "Running copy commands to prep deployment of demo..." -pushd `dirname $0` -echo "Copying my local.yaml to root of repo..." -cp local.yaml ../demo_project/ -echo "Copying config.yaml into cdf_auth_readwrite_all module..." -cp config.cdf_auth_readwrite_all.yaml ../demo_project/common/cdf_auth_readwrite_all/config.yaml -echo "Copying config.yaml into cdf_infield_common module..."
-cp config.cdf_infield_common.yaml ../demo_project/modules/cdf_infield_common/config.yaml -popd \ No newline at end of file diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml index 7a6c719ed..fd6dd72dd 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml +++ b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml @@ -90,7 +90,7 @@ Group: module_version: '1' origin: cdf-project-templates name: gp_infield_oid_checklist_admins - sourceId: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f + sourceId: <change_me> - capabilities: - threedAcl: actions: @@ -178,7 +178,7 @@ Group: module_version: '1' origin: cdf-project-templates name: gp_infield_oid_normal_users - sourceId: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e + sourceId: <change_me> - capabilities: - threedAcl: actions: @@ -266,7 +266,7 @@ Group: module_version: '1' origin: cdf-project-templates name: gp_infield_oid_template_admins - sourceId: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f + sourceId: <change_me> - capabilities: - threedAcl: actions: @@ -339,7 +339,7 @@ Group: module_version: '1' origin: cdf-project-templates name: gp_infield_oid_viewers - sourceId: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e + sourceId: <change_me> Node: - externalId: default_infield_config_minimal instanceType: node From e6b27ce10d54b3d4db60aecc69bec9cac2219084 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 09:16:57 +0100 Subject: [PATCH 30/90] Fixed test --- .../cdf_infield_location.yaml | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml index fd6dd72dd..488e9bd71 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml +++ b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml @@ -3,6 +3,18 @@ DataSet: externalId: ds_infield_oid_app_data name: infield:oid:app_data Group: +- capabilities: + - groupsAcl: + actions: + - LIST + - READ + scope: + currentuserscope: {} + metadata: + module_version: '1' + origin: cdf-project-templates + name: gp_infield_oid_checklist_admins + sourceId: <change_me> - capabilities: - threedAcl: actions: @@ -91,6 +103,18 @@ Group: origin: cdf-project-templates name: gp_infield_oid_checklist_admins sourceId: <change_me> +- capabilities: + - groupsAcl: + actions: + - LIST + - READ + scope: + currentuserscope: {} + metadata: + module_version: '1' + origin: cdf-project-templates + name: gp_infield_oid_normal_users + sourceId: <change_me> - capabilities: - threedAcl: actions: @@ -179,6 +203,18 @@ Group: origin: cdf-project-templates name: gp_infield_oid_normal_users sourceId: <change_me> +- capabilities: + - groupsAcl: + actions: + - LIST + - READ + scope: + currentuserscope: {} + metadata: + module_version: '1' + origin: cdf-project-templates + name: gp_infield_oid_template_admins + sourceId: <change_me> - capabilities: - threedAcl: actions: @@ -267,6 +303,18 @@ Group: origin: cdf-project-templates name: gp_infield_oid_template_admins sourceId: <change_me> +- capabilities: + - groupsAcl: + actions: + - LIST + - READ + scope: + currentuserscope: {} + metadata: + module_version: '1' + origin: cdf-project-templates + name: gp_infield_oid_viewers + sourceId: <change_me> - capabilities: - threedAcl: actions: From 56df7561aea267f8066581c70e347b3b940c0172 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3%B8nning?= Date: Tue, 12 Dec 2023 09:39:55 +0100 Subject: [PATCH 31/90] Syncing --- cognite_toolkit/config.yaml | 16 ++++++++++++++++ 1 file changed, 16
insertions(+) diff --git a/cognite_toolkit/config.yaml b/cognite_toolkit/config.yaml index d66b6f959..7a1fa4890 100644 --- a/cognite_toolkit/config.yaml +++ b/cognite_toolkit/config.yaml @@ -41,6 +41,22 @@ cognite_modules: # Only valid for this module, loads template variables from environment raw_db: pump_assets data_set: src:lift_pump_stations + cdf_data_pipeline_asset_valhall: + location_name: oid + module_version: '1' + source_name: workmate + asset_dataset: ds_asset_oid + asset_raw_input_db: asset_oid_workmate + asset_raw_input_table: assets + asset_location_extractor_group_source_id: <change_me> + asset_location_processing_group_source_id: <change_me> + asset_location_read_group_source_id: <change_me> + clientId: ${IDP_CLIENT_ID} + clientSecret: ${IDP_CLIENT_SECRET} + tokenUri: ${IDP_TOKEN_URL} + cdfProjectName: ${CDF_PROJECT} + scopes: ${IDP_SCOPES} + audience: ${IDP_AUDIENCE} experimental: cdf_asset_source_model: # Only valid for this module, loads template variables from environment From d11cd03d6aadaf694fcd85c4b489ef8f5546a98d Mon Sep 17 00:00:00 2001 From: Anders Albert <60234212+doctrino@users.noreply.github.com> Date: Tue, 12 Dec 2023 09:57:46 +0100 Subject: [PATCH 32/90] Build checks (#210) * tests: Added failing test * refactor: Warning on <change_me> * feat: DataSet missing * build: changelog * refactor: some cleanup --- CHANGELOG.cdf-tk.md | 2 + cognite_toolkit/cdf_tk/templates.py | 27 ++- cognite_toolkit/cdf_tk/utils.py | 156 ++++++++++++++++-- tests/test_cdf_tk/data/config.yaml | 14 ++ .../a_module/auth/readonly.all.group.yaml | 11 ++ tests/test_cdf_tk/test_templates.py | 8 +- tests/test_cdf_tk/test_utils.py | 57 ++++++- 7 files changed, 244 insertions(+), 31 deletions(-) create mode 100644 tests/test_cdf_tk/data/config.yaml create mode 100644 tests/test_cdf_tk/project_configs/cognite_modules/a_module/auth/readonly.all.group.yaml diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md index 59873b868..58d55fef1 100644 --- a/CHANGELOG.cdf-tk.md +++ b/CHANGELOG.cdf-tk.md @@ -19,6 +19,8 @@ Changes are grouped as follows: ### Added - Warnings if a configuration file is using `snake_case` when the resource type is expecting `camelCase`. - Added support for validation of `space` for data models. +- Check for whether template variables `<change_me>` are present in the config files. +- Check for whether data set id is present in the config files. ### Removed - In the `deploy` command `drop_data` option has been removed. To drop data, use the `clean` command instead. ### Changed diff --git a/cognite_toolkit/cdf_tk/templates.py b/cognite_toolkit/cdf_tk/templates.py index 09e2a78fb..3215c0bdb 100644 --- a/cognite_toolkit/cdf_tk/templates.py +++ b/cognite_toolkit/cdf_tk/templates.py @@ -16,7 +16,7 @@ from ruamel.yaml import YAML, CommentedMap from cognite_toolkit.cdf_tk.load import LOADER_BY_FOLDER_NAME -from cognite_toolkit.cdf_tk.utils import LoadWarning, validate_case_raw +from cognite_toolkit.cdf_tk.utils import validate_case_raw, validate_config_yaml, validate_data_set_is_set # This is the default config located locally in each module.
DEFAULT_CONFIG_FILE = "default.config.yaml" @@ -489,22 +489,14 @@ def build_config( selected_modules = get_selected_modules(source_module_dir, environment_file, build_env, verbose) config = read_yaml_file(config_file) + warnings = validate_config_yaml(config, config_file) + if warnings: + print(" [bold yellow]WARNING:[/] Found the following warnings in config.yaml:") + for warning in warnings: + print(f" {warning}") process_config_files(source_module_dir, selected_modules, build_dir, config, build_env, verbose) -def generate_warnings_report(load_warnings: list[LoadWarning], indent: int = 0) -> str: - report = [""] - for (file, identifier, id_name), file_warnings in itertools.groupby( - sorted(load_warnings), key=lambda w: (w.filepath, w.id_value, w.id_name) - ): - report.append(f"{' '*indent}In File {str(file)!r}") - report.append(f"{' '*indent}In entry {id_name}={identifier!r}") - for warning in file_warnings: - report.append(f"{' '*(indent+1)}{warning!s}") - - return "\n".join(report) - - def generate_config( directory: Path | Sequence[Path], include_modules: set[str] | None = None, existing_config: str | None = None ) -> tuple[str, ConfigEntries]: @@ -808,12 +800,17 @@ def validate(content: str, destination: Path, source_path: Path) -> None: loader = loader[0] else: loader = next((loader for loader in loader if re.match(loader.filename_pattern, destination.stem)), None) + if loader: load_warnings = validate_case_raw( parsed, loader.resource_cls, destination, identifier_key=loader.identifier_key ) if load_warnings: - print(f" [bold yellow]WARNING:[/]{generate_warnings_report(load_warnings, indent=1)}") + print(f" [bold yellow]WARNING:[/] Found potential snake_case issues: {load_warnings!s}") + + data_set_warnings = validate_data_set_is_set(parsed, loader.resource_cls, destination) + if data_set_warnings: + print(f" [bold yellow]WARNING:[/] Found missing data_sets: {data_set_warnings!s}") if __name__ == "__main__": diff --git a/cognite_toolkit/cdf_tk/utils.py b/cognite_toolkit/cdf_tk/utils.py index 8d5c8645a..f7024f3c1 100644 --- a/cognite_toolkit/cdf_tk/utils.py +++ b/cognite_toolkit/cdf_tk/utils.py @@ -16,15 +16,18 @@ import abc import collections import inspect +import itertools import json import logging import os +import re import typing -from collections.abc import Sequence +from collections import UserList +from collections.abc import Collection, Sequence from dataclasses import dataclass from functools import total_ordering from pathlib import Path -from typing import Any, get_origin +from typing import Any, ClassVar, Generic, TypeVar, get_origin import yaml from cognite.client import ClientConfig, CogniteClient @@ -388,6 +391,7 @@ def load_yaml_inject_variables(filepath: Path, variables: dict[str, str]) -> dic @dataclass(frozen=True) class LoadWarning: + _type: ClassVar[str] filepath: Path id_value: str id_name: str @@ -395,12 +399,12 @@ class LoadWarning: @total_ordering @dataclass(frozen=True) -class CaseWarning(LoadWarning): +class SnakeCaseWarning(LoadWarning): actual: str expected: str - def __lt__(self, other: CaseWarning) -> bool: - if not isinstance(other, CaseWarning): + def __lt__(self, other: SnakeCaseWarning) -> bool: + if not isinstance(other, SnakeCaseWarning): return NotImplemented return (self.filepath, self.id_value, self.expected, self.actual) < ( other.filepath, @@ -409,8 +413,8 @@ def __lt__(self, other: CaseWarning) -> bool: other.actual, ) - def __eq__(self, other: CaseWarning) -> bool: - if not isinstance(other, CaseWarning): + def __eq__(self, other: 
SnakeCaseWarning) -> bool: + if not isinstance(other, SnakeCaseWarning): return NotImplemented return (self.filepath, self.id_value, self.expected, self.actual) == ( other.filepath, @@ -423,12 +427,95 @@ def __str__(self): return f"CaseWarning: Got {self.actual!r}. Did you mean {self.expected!r}?" +@total_ordering +@dataclass(frozen=True) +class TemplateVariableWarning(LoadWarning): + path: str + + def __lt__(self, other: TemplateVariableWarning) -> bool: + if not isinstance(other, TemplateVariableWarning): + return NotImplemented + return (self.id_name, self.id_value, self.path) < (other.id_name, other.id_value, other.path) + + def __eq__(self, other: TemplateVariableWarning) -> bool: + if not isinstance(other, TemplateVariableWarning): + return NotImplemented + return (self.id_name, self.id_value, self.path) == (other.id_name, other.id_value, other.path) + + def __str__(self): + return f"{type(self).__name__}: Variable {self.id_name!r} has value {self.id_value!r} in file: {self.filepath.name}. Did you forget to change it?" + + +@total_ordering +@dataclass(frozen=True) +class DataSetMissingWarning(LoadWarning): + resource_name: str + + def __lt__(self, other: DataSetMissingWarning) -> bool: + if not isinstance(other, DataSetMissingWarning): + return NotImplemented + return (self.id_name, self.id_value, self.filepath) < (other.id_name, other.id_value, other.filepath) + + def __eq__(self, other: DataSetMissingWarning) -> bool: + if not isinstance(other, DataSetMissingWarning): + return NotImplemented + return (self.id_name, self.id_value, self.filepath) == (other.id_name, other.id_value, other.filepath) + + def __str__(self): + return f"{type(self).__name__}: It is recommended that you set dataSetExternalId for {self.resource_name}. This is missing in {self.filepath.name}. Did you forget to add it?" + + +T_Warning = TypeVar("T_Warning", bound=LoadWarning) + + +class Warnings(UserList, Generic[T_Warning]): + def __init__(self, collection: Collection[T_Warning] | None = None): + super().__init__(collection or []) + + +class SnakeCaseWarningList(Warnings[SnakeCaseWarning]): + def __str__(self) -> str: + output = [""] + for (file, identifier, id_name), file_warnings in itertools.groupby( + sorted(self), key=lambda w: (w.filepath, w.id_value, w.id_name) + ): + output.append(f" In File {str(file)!r}") + output.append(f" In entry {id_name}={identifier!r}") + for warning in file_warnings: + output.append(f"{' ' * 2}{warning!s}") + + return "\n".join(output) + + +class TemplateVariableWarningList(Warnings[TemplateVariableWarning]): + def __str__(self): + output = [""] + for path, module_warnings in itertools.groupby(sorted(self), key=lambda w: w.path): + if path: + output.append(f" In Section {str(path)!r}") + for warning in module_warnings: + output.append(f"{' ' * 2}{warning!s}") + + return "\n".join(output) + + +class DataSetMissingWarningList(Warnings[DataSetMissingWarning]): + def __str__(self): + output = [""] + for filepath, warnings in itertools.groupby(sorted(self), key=lambda w: w.filepath): + output.append(f" In file {str(filepath)!r}") + for warning in warnings: + output.append(f"{' ' * 2}{warning!s}") + + return "\n".join(output) + + def validate_case_raw( raw: dict[str, Any] | list[dict[str, Any]], resource_cls: type[CogniteObject], filepath: Path, identifier_key: str = "externalId", -) -> list[CaseWarning]: +) -> SnakeCaseWarningList: """Checks whether camel casing the raw data would match a parameter in the resource class. 
Args: @@ -452,8 +539,8 @@ def _validate_case_raw( filepath: Path, identifier_key: str = "externalId", identifier_value: str = "", -) -> list[CaseWarning]: - warnings = [] +) -> SnakeCaseWarningList: + warnings = SnakeCaseWarningList() if isinstance(raw, list): for item in raw: warnings.extend(_validate_case_raw(item, resource_cls, filepath, identifier_key)) @@ -486,7 +573,7 @@ def _validate_case_raw( for key in snake_cased: if (camel_key := to_camel_case(key)) in expected: - warnings.append(CaseWarning(filepath, identifier_value, identifier_key, str(key), str(camel_key))) + warnings.append(SnakeCaseWarning(filepath, identifier_value, identifier_key, str(key), str(camel_key))) try: type_hints_by_name = _TypeHints.get_type_hints_by_name(signature, resource_cls) @@ -518,3 +605,50 @@ def _validate_case_raw( ) return warnings + + +def validate_config_yaml(config: dict[str, Any], filepath: Path, path: str = "") -> TemplateVariableWarningList: + """Checks whether the config file has any issues. + + Currently, this checks for: + * Non-replaced template variables, such as <change_me>. + + Args: + config: The config to check. + filepath: The filepath of the config.yaml. + path: The path in the config.yaml. This is used recursively by this function. + """ + warnings = TemplateVariableWarningList() + pattern = re.compile(r"<.*?>") + for key, value in config.items(): + if isinstance(value, str) and pattern.match(value): + warnings.append(TemplateVariableWarning(filepath, value, key, path)) + elif isinstance(value, dict): + if path: + path += "." + warnings.extend(validate_config_yaml(value, filepath, f"{path}{key}")) + return warnings + + +def validate_data_set_is_set( + raw: dict[str, Any] | list[dict[str, Any]], + resource_cls: type[CogniteObject], + filepath: Path, + identifier_key: str = "externalId", +) -> DataSetMissingWarningList: + warnings = DataSetMissingWarningList() + signature = inspect.signature(resource_cls.__init__) + if "data_set_id" not in set(signature.parameters.keys()): + return warnings + + if isinstance(raw, list): + for item in raw: + warnings.extend(validate_data_set_is_set(item, resource_cls, filepath, identifier_key)) + return warnings + + if "dataSetExternalId" in raw or "dataSetId" in raw: + return warnings + + value = raw.get(identifier_key, raw.get(to_snake_case(identifier_key), f"No identifier {identifier_key}")) + warnings.append(DataSetMissingWarning(filepath, value, identifier_key, resource_cls.__name__)) + return warnings diff --git a/tests/test_cdf_tk/data/config.yaml new file mode 100644 index 000000000..3578a3754 --- /dev/null +++ b/tests/test_cdf_tk/data/config.yaml @@ -0,0 +1,14 @@ +cognite_modules: + a_module: + readwrite_source_id: + readonly_source_id: + another_module: + default_location: oid + source_asset: workmate + source_workorder: workmate + source_files: fileshare + source_timeseries: pi + top_variable: + parent_module: + child_module: + child_variable: diff --git a/tests/test_cdf_tk/project_configs/cognite_modules/a_module/auth/readonly.all.group.yaml new file mode 100644 index 000000000..ffcd7b7d2 --- /dev/null +++ b/tests/test_cdf_tk/project_configs/cognite_modules/a_module/auth/readonly.all.group.yaml @@ -0,0 +1,11 @@ +name: 'gp_cicd_all_read_only' +sourceId: '{{readonly_source_id}}' +metadata: + origin: 'cdf-project-templates' +capabilities: + - projectsAcl: + actions: + - LIST + - READ + scope: + all: {} diff --git
a/tests/test_cdf_tk/test_templates.py index d158b6e91..bf7171da5 100644 --- a/tests/test_cdf_tk/test_templates.py +++ b/tests/test_cdf_tk/test_templates.py @@ -6,9 +6,15 @@ import pytest import yaml -from cognite_toolkit.cdf_tk.templates import COGNITE_MODULES, create_local_config, generate_config, split_config +from cognite_toolkit.cdf_tk.templates import ( + COGNITE_MODULES, + create_local_config, + generate_config, + split_config, +) BUILD_CONFIG = Path(__file__).parent / "project_configs" +DATA = Path(__file__).parent / "data" def generate_config_test_cases(): diff --git a/tests/test_cdf_tk/test_utils.py b/tests/test_cdf_tk/test_utils.py index 30ff85299..d1106f515 100644 --- a/tests/test_cdf_tk/test_utils.py +++ b/tests/test_cdf_tk/test_utils.py @@ -1,4 +1,5 @@ from pathlib import Path +from typing import Any from unittest.mock import Mock, patch import pytest @@ -16,7 +17,16 @@ from cognite.client.exceptions import CogniteAuthError from cognite.client.testing import CogniteClientMock -from cognite_toolkit.cdf_tk.utils import CaseWarning, CDFToolConfig, load_yaml_inject_variables, validate_case_raw +from cognite_toolkit.cdf_tk.utils import ( + CDFToolConfig, + DataSetMissingWarning, + SnakeCaseWarning, + TemplateVariableWarning, + load_yaml_inject_variables, + validate_case_raw, + validate_config_yaml, + validate_data_set_is_set, +) THIS_FOLDER = Path(__file__).resolve().parent @@ -87,8 +97,8 @@ def test_validate_raw() -> None: assert len(warnings) == 2 assert sorted(warnings) == sorted( [ - CaseWarning(raw_file, "wrong_case", "externalId", "is_string", "isString"), - CaseWarning(raw_file, "wrong_case", "externalId", "is_step", "isStep"), + SnakeCaseWarning(raw_file, "wrong_case", "externalId", "is_string", "isString"), + SnakeCaseWarning(raw_file, "wrong_case", "externalId", "is_step", "isStep"), ] ) @@ -99,5 +109,44 @@ def test_validate_raw_nested() -> None: assert len(warnings) == 1 assert warnings == [ - CaseWarning(raw_file, "WorkItem", "externalId", "container_property_identifier", "containerPropertyIdentifier") + SnakeCaseWarning( + raw_file, "WorkItem", "externalId", "container_property_identifier", "containerPropertyIdentifier" + ) ] + + +@pytest.mark.parametrize( + "config_yaml, expected_warnings", + [ + pytest.param( + {"sourceId": "<change_me>"}, + [TemplateVariableWarning(Path("config.yaml"), "<change_me>", "sourceId", "")], + id="Single warning", + ), + pytest.param( + {"a_module": {"sourceId": "<change_me>"}}, + [TemplateVariableWarning(Path("config.yaml"), "<change_me>", "sourceId", "a_module")], + id="Nested warning", + ), + pytest.param( + {"a_super_module": {"a_module": {"sourceId": "<change_me>"}}}, + [TemplateVariableWarning(Path("config.yaml"), "<change_me>", "sourceId", "a_super_module.a_module")], + id="Deep nested warning", + ), + pytest.param({"a_module": {"sourceId": "123"}}, [], id="No warning"), + ], +) +def test_validate_config_yaml(config_yaml: dict[str, Any], expected_warnings: list[TemplateVariableWarning]) -> None: + warnings = validate_config_yaml(config_yaml, Path("config.yaml")) + + assert sorted(warnings) == sorted(expected_warnings) + + +def test_validate_data_set_is_set(): + warnings = validate_data_set_is_set( + {"externalId": "myTimeSeries", "name": "My Time Series"}, TimeSeries, Path("timeseries.yaml") + ) + + assert sorted(warnings) == sorted( + [DataSetMissingWarning(Path("timeseries.yaml"), "myTimeSeries", "externalId", "TimeSeries")] + ) From 4c08aea41e17d305172517d4ac30c99515f89b0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3%A5l=20R=C3%B8nning?= Date: Tue, 12 Dec
2023 10:40:53 +0100 Subject: [PATCH 33/90] Wrong indentation barred config upload when --drop --- cognite_toolkit/cdf_tk/load.py | 63 ++++++++++--------- .../data_models/APM_Config.space.yaml | 1 - .../data_models/apm_data_model.space.yaml | 1 - .../data_models/apm_simple.space.yaml | 1 - .../data_sets/dataset.yaml | 2 +- .../default.config.yaml | 1 - .../source_asset_valhall_workmate.config.yaml | 1 - .../source_asset_valhall_workmate.yaml | 1 - .../data_models/infieldAppData.space.yaml | 1 - .../infieldLocationAppData.space.yaml | 1 - .../infieldLocationSourceData.space.yaml | 1 - .../cdf_data_pipeline_asset_valhall.yaml | 2 +- 12 files changed, 34 insertions(+), 42 deletions(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index efde969d9..285ce22df 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -854,39 +854,40 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path self.ToolGlobals.failed = True return None - file_name = filepath.stem.split(".", 2)[1] - config_file_name = f"{file_name}.config.yaml" - config_file = next( - ( - file - for file in Path(filepath.parent).iterdir() - if file.is_file() and file.name.endswith(config_file_name) - ), - None, + file_name = filepath.stem.split(".", 2)[1] + config_file_name = f"{file_name}.config.yaml" + config_file = next( + ( + file + for file in Path(filepath.parent).iterdir() + if file.is_file() and file.name.endswith(config_file_name) + ), + None, + ) + + if not config_file.exists(): + print( + f" [bold yellow]WARNING:[/] no config file for extraction pipeline found. Expected to find {config_file_name} in same folder as {file_name}" ) + return extractionPipelineList - if not config_file.exists(): - print( - f" [bold yellow]WARNING:[/] no config file for extraction pipeline found. 
Expected to find {config_file_name} in same folder as {file_name}" - ) - return extractionPipelineList - - resources = load_yaml_inject_variables(config_file, {}) - resources = [resources] if isinstance(resources, dict) else resources - - for resource in resources: - extractionPipelineConfig = ExtractionPipelineConfig.load( - { - "externalId": resource.get("externalId"), - "description": resource.get("description"), - "config": yaml.dump(resource.get("config", ""), indent=4), - } - ) - try: - self.client.extraction_pipelines.config.create(extractionPipelineConfig) - except Exception as e: - print(f"[bold red]ERROR:[/] Failed to create extraction pipeline config.\n{e}") - self.ToolGlobals.failed = True + resources = load_yaml_inject_variables(config_file, {}) + resources = [resources] if isinstance(resources, dict) else resources + + for resource in resources: + extractionPipelineConfig = ExtractionPipelineConfig.load( + { + "externalId": resource.get("externalId"), + "description": resource.get("description"), + "config": yaml.dump(resource.get("config", ""), indent=4), + } + ) + try: + self.client.extraction_pipelines.config.create(extractionPipelineConfig) + + except Exception as e: + print(f"[bold red]ERROR:[/] Failed to create extraction pipeline config.\n{e}") + self.ToolGlobals.failed = True return extractionPipelineList diff --git a/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/APM_Config.space.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/APM_Config.space.yaml index de9b98e35..0b3566879 100644 --- a/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/APM_Config.space.yaml +++ b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/APM_Config.space.yaml @@ -1,4 +1,3 @@ ---- space: APM_Config name: APM_Config description: Space for APM application configurations \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/apm_data_model.space.yaml b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/apm_data_model.space.yaml index fd093d6a2..391167a77 100644 --- a/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/apm_data_model.space.yaml +++ b/cognite_toolkit/cognite_modules/core/cdf_apm_base/data_models/apm_data_model.space.yaml @@ -1,4 +1,3 @@ ---- space: {{apm_datamodel_space}} name: {{apm_datamodel_space}} description: Space for APM data model \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/apm_simple.space.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/apm_simple.space.yaml index 78d10c465..fb838e606 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/apm_simple.space.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_models/apm_simple.space.yaml @@ -1,4 +1,3 @@ ---- space: {{space}} name: {{space}} description: Space for APM simple data model \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml index d2ffbaffb..e368feca6 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml @@ -10,4 +10,4 @@ metadata: tableName: "assets" transformations: - 
externalId: tr_asset_{{location_name}}_{{source_name}}_asset_hierarchy - type: "jetfire" + type: "Transformations" diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml index 1cbad60dc..316727a22 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml @@ -3,7 +3,6 @@ # The data originates from a single compressor on Aker BP’s Valhall oil platform # in the North Sea. Aker BP selected the first stage compressor on the Valhall # because it is a subsystem with clearly defined boundaries, rich in time series and maintenance data. ---- # specify the site/asset location where data comes from, e.g. 'valhall_oid', or if they are generic for all assets use 'all' location_name: oid module_version: '1' diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml index 58b055fd9..0c71b4189 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml @@ -1,4 +1,3 @@ ---- externalId: 'ep_src_asset_{{location_name}}_{{source_name}}' description: 'DB extractor config reading data from {{location_name}}:{{source_name}}' config: diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml index 2e75ede3a..d6ea071bc 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.yaml @@ -1,4 +1,3 @@ ---- externalId: 'ep_src_asset_{{location_name}}_{{source_name}}' name: 'src:asset:{{location_name}}:{{source_name}}' dataSetExternalId: 'ds_asset_{{location_name}}' diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_common/data_models/infieldAppData.space.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_common/data_models/infieldAppData.space.yaml index c14e413af..6ae716dc9 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_common/data_models/infieldAppData.space.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_common/data_models/infieldAppData.space.yaml @@ -1,4 +1,3 @@ ---- space: cognite_app_data name: cognite_app_data description: Space for Infield App Data \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationAppData.space.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationAppData.space.yaml index 502d9927f..b8c55a417 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationAppData.space.yaml +++
b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationAppData.space.yaml @@ -1,4 +1,3 @@ ---- space: sp_infield_{{default_location}}_app_data name: sp:infield:{{default_location}}:app_data description: Space for Infield Location App Data \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationSourceData.space.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationSourceData.space.yaml index 2c871a97e..e3a6b9421 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationSourceData.space.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_models/infieldLocationSourceData.space.yaml @@ -1,4 +1,3 @@ ---- space: sp_asset_{{default_location}}_source name: sp:infield:{{default_location}}:source description: Space for Infield Location Source Data \ No newline at end of file diff --git a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml index d709e3cc8..6e3e53901 100644 --- a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml +++ b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml @@ -5,7 +5,7 @@ DataSet: consoleSource: '{"names": ["workmate"]}' rawTables: '[{"databaseName": "asset_oid_workmate", "tableName": "assets"}]' transformations: '[{"externalId": "tr_asset_oid_workmate_asset_hierarchy", "type": - "jetfire"}]' + "Transformations"}]' name: asset:oid Group: - capabilities: From 1da09f9a9d7099b0ab346cc61b0c66fc56764a48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3%A5l=20R=C3%B8nning?= Date: Tue, 12 Dec 2023 10:45:10 +0100 Subject: [PATCH 34/90] trafo credentials --- .../tr_asset_oid_workmate_asset_hierarchy.yaml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml index 69e65c1ac..6e4625550 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml @@ -8,15 +8,14 @@ conflictMode: upsert # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: - clientId: {{clientId}} - clientSecret: {{clientSecret}} - tokenUri: {{tokenUri}} - # Optional: If idP requires providing the scopes + clientId: {{cicd_clientId}} + clientSecret: {{cicd_clientSecret}} + tokenUri: {{cicd_tokenUri}} + # Optional: If idP requires providing the scopes cdfProjectName: {{cdfProjectName}} - scopes: - - {{scopes}} - # Optional: If idP requires providing the audience - audience: {{audience}} + scopes: {{cicd_scopes}} + # Optional: If idP requires providing the audience + audience: {{cicd_audience}} schedule: # every hour interval: '0 * * * *' From 73e44cc65e30dd5ac171f90cccfc1d0caed2d1dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3%A5l=20R=C3%B8nning?= Date: Tue, 12 Dec 2023 10:48:58 +0100 Subject: [PATCH 35/90] lint ---
cognite_toolkit/cdf_tk/load.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 285ce22df..ea9480217 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -884,7 +884,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path ) try: self.client.extraction_pipelines.config.create(extractionPipelineConfig) - + except Exception as e: print(f"[bold red]ERROR:[/] Failed to create extraction pipeline config.\n{e}") self.ToolGlobals.failed = True From 0983aaba2ce5f6f697d4f2de3b47da040e882fe8 Mon Sep 17 00:00:00 2001 From: Anders Albert <60234212+doctrino@users.noreply.github.com> Date: Tue, 12 Dec 2023 11:18:52 +0100 Subject: [PATCH 36/90] Clean up print messages (#208) * refactor: Updated demo variables * refactor: missing config file * fix: Removed source ids * tests: fix test data * build: changelog * style: better printing * refactor: Refactor deploy_or_clean * tests: update snapshots * refactor: fix introduced bug * tests: regen test data * build: changelog * refactor: Review feedback --- CHANGELOG.cdf-tk.md | 4 +- cognite_toolkit/cdf.py | 40 ++-- cognite_toolkit/cdf_tk/load.py | 173 +++++++++++++----- .../cdf_apm_simple_data_model.yaml | 22 +-- .../cdf_asset_source_model.yaml | 3 +- .../cdf_infield_location.yaml | 9 +- .../cdf_oid_example_data.yaml | 26 +-- .../example_pump_asset_hierarchy.yaml | 3 +- .../example_pump_data_model.yaml | 6 +- tests/test_cdf_tk/test_load.py | 6 +- 10 files changed, 182 insertions(+), 110 deletions(-) diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md index 58d55fef1..8d7c9653e 100644 --- a/CHANGELOG.cdf-tk.md +++ b/CHANGELOG.cdf-tk.md @@ -21,6 +21,7 @@ Changes are grouped as follows: - Added support for validation of `space` for data models. - Check for whether template variables `<change_me>` are present in the config files. - Check for whether data set id is present in the config files. +- Print table at the end of `cdf-tk deploy` with the resources that were created, deleted, and skipped. ### Removed - In the `deploy` command `drop_data` option has been removed. To drop data, use the `clean` command instead. ### Changed
diff --git a/cognite_toolkit/cdf.py b/cognite_toolkit/cdf.py index 54b06ecb5..84ca0b882 100755 --- a/cognite_toolkit/cdf.py +++ b/cognite_toolkit/cdf.py @@ -22,7 +22,8 @@ from cognite_toolkit.cdf_tk.load import ( LOADER_BY_FOLDER_NAME, AuthLoader, - drop_load_resources, + DeployResults, + deploy_or_clean_resources, ) from cognite_toolkit.cdf_tk.templates import ( COGNITE_MODULES, @@ -276,41 +277,47 @@ def deploy( arguments = dict( ToolGlobals=ToolGlobals, drop=drop, - load=True, + action="deploy", dry_run=dry_run, drop_data=False, verbose=ctx.obj.verbose, ) - + results = DeployResults([], "deploy", dry_run=dry_run) if "auth" in include and (directory := (Path(build_dir) / "auth")).is_dir(): # First, we need to get all the generic access, so we can create the rest of the resources. print("[bold]EVALUATING auth resources with ALL scope...[/]") - drop_load_resources( + result = deploy_or_clean_resources( AuthLoader.create_loader(ToolGlobals, target_scopes="all_scoped_skipped_validation"), directory, **arguments, ) + results.append(result) if ToolGlobals.failed: print("[bold red]ERROR: [/] Failure to deploy auth as expected.") exit(1) for LoaderCls in TopologicalSorter(selected_loaders).static_order(): - drop_load_resources( + result = deploy_or_clean_resources( LoaderCls.create_loader(ToolGlobals), build_path / LoaderCls.folder_name, **arguments, ) + results.append(result) if ToolGlobals.failed: + if results: + print(results.create_rich_table()) print(f"[bold red]ERROR: [/] Failure to load {LoaderCls.folder_name} as expected.") exit(1) if "auth" in include and (directory := (Path(build_dir) / "auth")).is_dir(): # Last, we need to get all the scoped access, as the resources should now have been created. print("[bold]EVALUATING auth resources scoped to resources...[/]") - drop_load_resources( + result = deploy_or_clean_resources( AuthLoader.create_loader(ToolGlobals, target_scopes="resource_scoped_only"), directory, **arguments, ) + results.append(result) + print(results.create_rich_table()) if ToolGlobals.failed: print("[bold red]ERROR: [/] Failure to deploy auth as expected.") exit(1) @@ -396,35 +403,40 @@ def clean( if ToolGlobals.failed: print("[bold red]ERROR: [/] Failure to delete data models as expected.") exit(1) - + results = DeployResults([], "clean", dry_run=dry_run) for LoaderCls in reversed(list(TopologicalSorter(selected_loaders).static_order())): - drop_load_resources( + result = deploy_or_clean_resources( LoaderCls.create_loader(ToolGlobals), build_path / LoaderCls.folder_name, ToolGlobals, drop=True, - load=False, + action="clean", drop_data=True, dry_run=dry_run, verbose=ctx.obj.verbose, ) + results.append(result) if ToolGlobals.failed: + if results: + print(results.create_rich_table()) print(f"[bold red]ERROR: [/] Failure to clean {LoaderCls.folder_name} as expected.") exit(1) if "auth" in include and (directory := (Path(build_dir) / "auth")).is_dir(): - drop_load_resources( + result = deploy_or_clean_resources( AuthLoader.create_loader(ToolGlobals, target_scopes="all_scoped_skipped_validation"), directory, ToolGlobals, drop=True, clean=True, - load=False, + action="clean", dry_run=dry_run, verbose=ctx.obj.verbose, ) - if ToolGlobals.failed: - print("[bold red]ERROR: [/] Failure to clean auth as expected.") - exit(1) + results.append(result) + print(results.create_rich_table()) + if ToolGlobals.failed: + print("[bold red]ERROR: [/] Failure to clean auth as expected.") + exit(1) @auth_app.callback(invoke_without_command=True) diff --git a/cognite_toolkit/cdf_tk/load.py 
b/cognite_toolkit/cdf_tk/load.py index 944a297a5..7b0e74a0f 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -18,10 +18,11 @@ import json import re from abc import ABC, abstractmethod -from collections import Counter -from collections.abc import Sequence, Sized +from collections import Counter, UserList +from collections.abc import Iterable, Sequence, Sized from contextlib import suppress from dataclasses import dataclass +from functools import total_ordering from pathlib import Path from typing import Any, Generic, Literal, TypeVar, Union, final @@ -76,6 +77,7 @@ from cognite.client.data_classes.iam import Group, GroupList from cognite.client.exceptions import CogniteAPIError, CogniteDuplicatedError, CogniteNotFoundError from rich import print +from rich.table import Table from typing_extensions import Self from .delete import delete_instances @@ -367,6 +369,14 @@ def __init__( super().__init__(client, ToolGlobals) self.load = target_scopes + @property + def display_name(self): + if self.load.startswith("all"): + scope = "all" + else: + scope = "resource scoped" + return f"{self.api_name}({scope})" + @staticmethod def fixup_resource(local: T_Resource, remote: T_Resource) -> T_Resource: local.id = remote.id @@ -938,6 +948,7 @@ class SpaceLoader(Loader[str, SpaceApply, SpaceApplyList]): filename_pattern = r"^.*\.?(space)$" resource_cls = SpaceApply list_cls = SpaceApplyList + _display_name = "spaces" @classmethod def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> list[Capability]: @@ -983,6 +994,8 @@ class ContainerLoader(Loader[ContainerId, ContainerApply, ContainerApplyList]): list_cls = ContainerApplyList dependencies = frozenset({SpaceLoader}) + _display_name = "containers" + @classmethod def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability: # Todo Scoped to spaces @@ -1016,6 +1029,8 @@ class ViewLoader(Loader[ViewId, ViewApply, ViewApplyList]): list_cls = ViewApplyList dependencies = frozenset({SpaceLoader, ContainerLoader}) + _display_name = "views" + @classmethod def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability: # Todo Scoped to spaces @@ -1041,6 +1056,7 @@ class DataModelLoader(Loader[DataModelId, DataModelApply, DataModelApplyList]): resource_cls = DataModelApply list_cls = DataModelApplyList dependencies = frozenset({SpaceLoader, ViewLoader}) + _display_name = "data models" @classmethod def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability: @@ -1159,17 +1175,64 @@ def create(self, items: Sequence[LoadableEdges], drop: bool, filepath: Path) -> return items -def drop_load_resources( +@total_ordering +@dataclass +class DeployResult: + name: str + created: int + deleted: int + skipped: int + total: int + + def __lt__(self, other): + return self.name < other.name + + def __eq__(self, other): + return self.name == other.name + + +class DeployResults(UserList): + def __init__(self, collection: Iterable[DeployResult], action: Literal["deploy", "clean"], dry_run: bool = False): + super().__init__(collection) + self.action = action + self.dry_run = dry_run + + def create_rich_table(self) -> Table: + table = Table(title=f"Summary of {self.action} command:") + prefix = "" + if self.dry_run: + prefix = "Would have " + table.add_column("Resource", justify="right") + table.add_column(f"{prefix}Created", justify="right", style="green") + table.add_column(f"{prefix}Deleted", justify="right", style="red") + table.add_column(f"{prefix}Skipped", justify="right", style="yellow") + 
table.add_column("Total", justify="right") + for item in sorted(self.data): + table.add_row( + item.name, + str(item.created), + str(item.deleted), + str(item.skipped), + str(item.total), + ) + + return table + + +def deploy_or_clean_resources( loader: Loader, path: Path, ToolGlobals: CDFToolConfig, drop: bool = False, clean: bool = False, - load: bool = True, + action: Literal["deploy", "clean"] = "deploy", dry_run: bool = False, drop_data: bool = False, verbose: bool = False, -): +) -> DeployResult: + if action not in ["deploy", "clean"]: + raise ValueError(f"Invalid action {action}") + if path.is_file(): if path.suffix not in loader.filetypes or not loader.filetypes: raise ValueError("Invalid file type") @@ -1189,29 +1252,29 @@ def drop_load_resources( nr_of_batches = len(items) nr_of_items = sum(len(item) if isinstance(item, Sized) else 1 for item in items) if nr_of_items == 0: - return - nr_of_deleted = 0 - nr_of_created = 0 - if load: - print(f"[bold]Uploading {nr_of_items} {loader.display_name} in {nr_of_batches} batches to CDF...[/]") + return DeployResult(name=loader.display_name, created=0, deleted=0, skipped=0, total=0) + if action == "deploy": + action_word = "Loading" if dry_run else "Uploading" + print(f"[bold]{action_word} {nr_of_items} {loader.display_name} in {nr_of_batches} batches to CDF...[/]") else: - print(f"[bold]Cleaning {nr_of_items} {loader.display_name} in {nr_of_batches} batches to CDF...[/]") + action_word = "Loading" if dry_run else "Cleaning" + print(f"[bold]{action_word} {nr_of_items} {loader.display_name} in {nr_of_batches} batches to CDF...[/]") batches = [item if isinstance(item, Sized) else [item] for item in items] - if drop and loader.support_drop and load: + if drop and loader.support_drop and action == "deploy": print(f" --drop is specified, will delete existing {loader.display_name} before uploading.") + + # Deleting resources. + nr_of_deleted = 0 if (drop and loader.support_drop) or clean: for batch in batches: - drop_items: list = [] - for item in batch: - # Set the context info for this CDF project - if hasattr(item, "data_set_id") and ToolGlobals.data_set_id is not None: - item.data_set_id = ToolGlobals.data_set_id - drop_items.append(loader.get_id(item)) - if not dry_run: + drop_items = [loader.get_id(item) for item in batch] + if dry_run: + nr_of_deleted += len(drop_items) + if verbose: + print(f" Would have deleted {len(drop_items)} {loader.display_name}.") + else: try: nr_of_deleted += loader.delete(drop_items, drop_data) - if verbose: - print(f" Deleted {len(drop_items)} {loader.display_name}.") except CogniteAPIError as e: if e.code == 404: print(f" [bold yellow]WARNING:[/] {len(drop_items)} {loader.display_name} do(es) not exist.") @@ -1219,37 +1282,57 @@ def drop_load_resources( print(f" [bold yellow]WARNING:[/] {len(drop_items)} {loader.display_name} do(es) not exist.") except Exception as e: print( - f" [bold yellow]WARNING:[/] Failed to delete {len(drop_items)} {loader.display_name}. Error {e}" + f" [bold yellow]WARNING:[/] Failed to delete {len(drop_items)} {loader.display_name}. Error {e}." 
) - else: - print(f" Would have deleted {len(drop_items)} {loader.display_name}.") - if not load: - return - try: - if not dry_run: - for batch, filepath in zip(batches, filepaths): - if not drop and loader.support_upsert: + else: # Delete succeeded if verbose: - print(f" Comparing {len(batch)} {loader.display_name} from {filepath}...") - batch = loader.remove_unchanged(batch) - if verbose: - print(f" {len(batch)} {loader.display_name} to be deployed...") - if len(batch) > 0: + print(f" Deleted {len(drop_items)} {loader.display_name}.") + if dry_run and action == "clean" and verbose: + # Only clean command prints this, if not we print it at the end + print(f" Would have deleted {nr_of_deleted} {loader.display_name} in total.") + + if action == "clean": + # Clean Command, only delete. + return DeployResult(name=loader.display_name, created=0, deleted=nr_of_deleted, skipped=0, total=nr_of_items) + + nr_of_created = 0 + nr_of_skipped = 0 + for batch, filepath in zip(batches, filepaths): + if not drop and loader.support_upsert: + if verbose: + print(f" Comparing {len(batch)} {loader.display_name} from {filepath}...") + batch = loader.remove_unchanged(batch) + if verbose: + print(f" {len(batch)} {loader.display_name} to be deployed...") + + if batch: + if dry_run: + nr_of_created += len(batch) + else: + try: created = loader.create(batch, drop, filepath) - nr_of_created += len(created) if created is not None else 0 + except Exception as e: + print(f" [bold yellow]WARNING:[/] Failed to upload {loader.display_name}. Error {e}.") + ToolGlobals.failed = True + return + else: + newly_created = len(created) if created is not None else 0 + nr_of_created += newly_created + nr_of_skipped += len(batch) - newly_created if isinstance(loader, AuthLoader): nr_of_deleted += len(created) - except Exception as e: - print(f"[bold red]ERROR:[/] Failed to upload {loader.display_name}.") - print(e) - ToolGlobals.failed = True - return - if nr_of_deleted != 0: + if verbose: + prefix = "Would have" if dry_run else "" print( - f" Deleted {nr_of_deleted} out of {nr_of_items} {loader.display_name} from {len(filepaths)} config files." + f" {prefix} Created {nr_of_created}, Deleted {nr_of_deleted}, Skipped {nr_of_skipped}, Total {nr_of_items}." 
) - - print(f" Created {nr_of_created} out of {nr_of_items} {loader.display_name} from {len(filepaths)} config files.") + return DeployResult( + name=loader.display_name, + created=nr_of_created, + deleted=nr_of_deleted, + skipped=nr_of_skipped, + total=nr_of_items, + ) LOADER_BY_FOLDER_NAME: dict[str, list[type[Loader]]] = {} diff --git a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml index 22bdc8b6f..80291771b 100644 --- a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml @@ -331,8 +331,7 @@ Space: name: apm_simple space: apm_simple Transformation: -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: children destinationType: Asset @@ -368,7 +367,6 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} - conflictMode: upsert - dataSetId: 999 destination: dataModel: destinationRelationshipFromType: null @@ -408,8 +406,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: null destinationType: Asset @@ -443,8 +440,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: null destinationType: WorkItem @@ -482,8 +478,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: linkedAssets destinationType: WorkItem @@ -524,8 +519,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: workItems destinationType: WorkOrder @@ -566,8 +560,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: null destinationType: WorkOrder @@ -612,8 +605,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: linkedAssets destinationType: WorkOrder diff --git a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml index 12f72714c..0fb3042ec 100644 --- a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml @@ -30,8 +30,7 @@ Space: name: cdfTemplateInstances space: cdfTemplateInstances Transformation: -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: null destinationType: Asset diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml index 67d5ec900..723649fa8 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml +++ b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml @@ -371,8 +371,7 @@ Space: name: sp:infield:oid:source space: sp_asset_oid_source Transformation: -- dataSetId: 999 - destination: +- destination: 
instanceSpace: sp_asset_oid_source type: nodes view: @@ -407,8 +406,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- dataSetId: 999 - destination: +- destination: instanceSpace: sp_asset_oid_source type: nodes view: @@ -444,8 +442,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: null destinationType: APM_Activity diff --git a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml index aaf12b0fa..a394d3b10 100644 --- a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml +++ b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml @@ -17,7 +17,6 @@ DataSet: FileMetadata: - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-25578-P-4110006-001.pdf name: PH-25578-P-4110006-001.pdf overwrite: true @@ -26,7 +25,6 @@ FileMetadata: name: PH-25578-P-4110006-001.pdf - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-25578-P-4110010-001.pdf name: PH-25578-P-4110010-001.pdf overwrite: true @@ -35,7 +33,6 @@ FileMetadata: name: PH-25578-P-4110010-001.pdf - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-25578-P-4110119-001.pdf name: PH-25578-P-4110119-001.pdf overwrite: true @@ -44,7 +41,6 @@ FileMetadata: name: PH-25578-P-4110119-001.pdf - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-ME-P-0003-001.pdf name: PH-ME-P-0003-001.pdf overwrite: true @@ -53,7 +49,6 @@ FileMetadata: name: PH-ME-P-0003-001.pdf - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-ME-P-0004-001.pdf name: PH-ME-P-0004-001.pdf overwrite: true @@ -62,7 +57,6 @@ FileMetadata: name: PH-ME-P-0004-001.pdf - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-ME-P-0151-001.pdf name: PH-ME-P-0151-001.pdf overwrite: true @@ -71,7 +65,6 @@ FileMetadata: name: PH-ME-P-0151-001.pdf - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-ME-P-0152-001.pdf name: PH-ME-P-0152-001.pdf overwrite: true @@ -80,7 +73,6 @@ FileMetadata: name: PH-ME-P-0152-001.pdf - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-ME-P-0153-001.pdf name: PH-ME-P-0153-001.pdf overwrite: true @@ -89,7 +81,6 @@ FileMetadata: name: PH-ME-P-0153-001.pdf - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-ME-P-0156-001.pdf name: PH-ME-P-0156-001.pdf overwrite: true @@ -98,7 +89,6 @@ FileMetadata: name: PH-ME-P-0156-001.pdf - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-ME-P-0156-002.pdf name: PH-ME-P-0156-002.pdf overwrite: true @@ -107,7 +97,6 @@ FileMetadata: name: PH-ME-P-0156-002.pdf - args: [] kwargs: - data_set_id: 999 external_id: fileshare_PH-ME-P-0160-001.pdf name: PH-ME-P-0160-001.pdf overwrite: true @@ -256,7 +245,7 @@ Row: table_name: workorders name: workorder_oid_workmate_workorders_True TimeSeries: -- dataSetId: 999 +- dataSetId: 42 description: PH 1st Stg Disch Gas Coolers externalId: pi_160633 isStep: false @@ -314,7 +303,7 @@ TimeSeries: userreal2: '0' zero: '0' name: VAL_23-PDT-92602:X.Value -- dataSetId: 999 +- dataSetId: 42 description: PH 1stStgSuctCool Gas Out externalId: pi_160696 isStep: false @@ -372,7 +361,7 @@ TimeSeries: userreal2: '0' zero: '0' name: VAL_23-PT-92504:X.Value -- dataSetId: 999 +- dataSetId: 42 description: PH 1stStgComp Discharge externalId: pi_160702 isStep: false @@ -431,7 +420,7 @@ TimeSeries: zero: '0' 
name: VAL_23-PT-92536:X.Value unit: bar -- dataSetId: 999 +- dataSetId: 42 description: PH 1stStgSuctCool Gas Out Measured Value externalId: pi_160781 isStep: false @@ -490,7 +479,7 @@ TimeSeries: zero: '0' name: VAL_23-TIC-92504:Z.X.Value unit: degC -- dataSetId: 999 +- dataSetId: 42 description: PH 1stStgDiscCool CM Out externalId: pi_163582 isStep: false @@ -548,7 +537,7 @@ TimeSeries: userreal2: '0' zero: '0' name: VAL_45-PT-92608:X.Value -- dataSetId: 999 +- dataSetId: 42 description: PH 1stStgDiscClr CoolMed Sply externalId: pi_163657 isStep: false @@ -606,7 +595,7 @@ TimeSeries: userreal2: '0' zero: '0' name: VAL_45-TT-92606:X.Value -- dataSetId: 999 +- dataSetId: 42 description: PH 1stStgDiscCool Gas Out Output externalId: pi_191092 isStep: false @@ -667,7 +656,6 @@ TimeSeries: unit: degC Transformation: - conflictMode: upsert - dataSetId: 999 destination: type: asset_hierarchy destinationOidcCredentials: diff --git a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml index 4c948107f..a356487d2 100644 --- a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml @@ -43,8 +43,7 @@ Row: table_name: collections_pump name: pump_assets_collections_pump_True Transformation: -- dataSetId: 999 - destination: +- destination: type: asset_hierarchy destinationOidcCredentials: audience: ${IDP_AUDIENCE} diff --git a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml index 3efb1229a..e3ff12c36 100644 --- a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml @@ -65,8 +65,7 @@ Space: name: pumpModelSpace space: pumpModelSpace Transformation: -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: pumps destinationType: LiftStation @@ -102,8 +101,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- dataSetId: 999 - destination: +- destination: dataModel: destinationRelationshipFromType: null destinationType: Pump diff --git a/tests/test_cdf_tk/test_load.py b/tests/test_cdf_tk/test_load.py index b8039d6d5..bca592c9a 100644 --- a/tests/test_cdf_tk/test_load.py +++ b/tests/test_cdf_tk/test_load.py @@ -10,7 +10,7 @@ DataSetsLoader, FileLoader, Loader, - drop_load_resources, + deploy_or_clean_resources, ) from cognite_toolkit.cdf_tk.utils import CDFToolConfig @@ -35,7 +35,9 @@ def test_loader_class( cdf_tool.verify_capabilities.return_value = cognite_client_approval cdf_tool.data_set_id = 999 - drop_load_resources(loader_cls.create_loader(cdf_tool), directory, cdf_tool, drop=False, load=True, dry_run=False) + deploy_or_clean_resources( + loader_cls.create_loader(cdf_tool), directory, cdf_tool, drop=False, action="deploy", dry_run=False + ) dump = cognite_client_approval.dump() data_regression.check(dump, fullpath=SNAPSHOTS_DIR / f"{directory.name}.yaml") From 1c458ff71fd5bc51d71ded1e85979746cb4bf7fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 13:54:19 +0100 Subject: [PATCH 37/90] Support for non-yaml config files --- cognite_toolkit/cdf_tk/load.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 
ea9480217..8397e1998 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -792,6 +792,7 @@ class ExtractionPipelineLoader(Loader[str, ExtractionPipeline, ExtractionPipelin support_drop = True api_name = "extraction_pipelines" folder_name = "extraction_pipelines" + filename_pattern = r'^(?:(?!\.config).)*$' # Matches all yaml files except config.yaml resource_cls = ExtractionPipeline list_cls = ExtractionPipelineList dependencies = frozenset({DataSetsLoader, RawLoader}) @@ -825,10 +826,9 @@ def delete(self, ids: Sequence[str], drop_data: bool) -> int: return 0 def load_resource(self, filepath: Path, dry_run: bool) -> ExtractionPipeline: - if filepath.name.endswith(".config.yaml"): - return None - + resource = load_yaml_inject_variables(filepath, {}) + if resource.get("dataSetExternalId") is not None: ds_external_id = resource.pop("dataSetExternalId") resource["dataSetId"] = self.ToolGlobals.verify_dataset(ds_external_id) if not dry_run else -1 @@ -855,19 +855,19 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path return None file_name = filepath.stem.split(".", 2)[1] - config_file_name = f"{file_name}.config.yaml" + config_file_stem = f"{file_name}.config" config_file = next( ( file for file in Path(filepath.parent).iterdir() - if file.is_file() and file.name.endswith(config_file_name) + if file.is_file() and config_file_stem in file.name ), None, ) if not config_file.exists(): print( - f" [bold yellow]WARNING:[/] no config file for extraction pipeline found. Expected to find {config_file_name} in same folder as {file_name}" + f" [bold yellow]WARNING:[/] no config file for extraction pipeline found. Expected to find {config_file_stem} in same folder as {file_name}" ) return extractionPipelineList @@ -879,7 +879,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path { "externalId": resource.get("externalId"), "description": resource.get("description"), - "config": yaml.dump(resource.get("config", ""), indent=4), + "config": yaml.dump(resource.get("config", ""), indent=4) if config_file.suffix == ".yaml" else str(resource.get("config", "")) } ) try: From b4e6cac3078a0275a510e6406475e1f3df364fb9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 13:56:47 +0100 Subject: [PATCH 38/90] ruff run --- cognite_toolkit/cdf_tk/load.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 8397e1998..52bb1d41f 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -792,7 +792,7 @@ class ExtractionPipelineLoader(Loader[str, ExtractionPipeline, ExtractionPipelin support_drop = True api_name = "extraction_pipelines" folder_name = "extraction_pipelines" - filename_pattern = r'^(?:(?!\.config).)*$' # Matches all yaml files except config.yaml + filename_pattern = r"^(?:(?!\.config).)*$" # Matches all yaml files except config.yaml resource_cls = ExtractionPipeline list_cls = ExtractionPipelineList dependencies = frozenset({DataSetsLoader, RawLoader}) @@ -826,9 +826,8 @@ def delete(self, ids: Sequence[str], drop_data: bool) -> int: return 0 def load_resource(self, filepath: Path, dry_run: bool) -> ExtractionPipeline: - resource = load_yaml_inject_variables(filepath, {}) - + if resource.get("dataSetExternalId") is not None: ds_external_id = resource.pop("dataSetExternalId") resource["dataSetId"] = self.ToolGlobals.verify_dataset(ds_external_id) if not 
dry_run else -1
@@ -857,11 +856,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path
         file_name = filepath.stem.split(".", 2)[1]
         config_file_stem = f"{file_name}.config"
         config_file = next(
-            (
-                file
-                for file in Path(filepath.parent).iterdir()
-                if file.is_file() and config_file_stem in file.name
-            ),
+            (file for file in Path(filepath.parent).iterdir() if file.is_file() and config_file_stem in file.name),
             None,
         )

@@ -879,7 +874,9 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path
             {
                 "externalId": resource.get("externalId"),
                 "description": resource.get("description"),
-                "config": yaml.dump(resource.get("config", ""), indent=4) if config_file.suffix == ".yaml" else str(resource.get("config", ""))
+                "config": yaml.dump(resource.get("config", ""), indent=4)
+                if config_file.suffix == ".yaml"
+                else str(resource.get("config", "")),
             }
         )
         try:

From 0dc9906bb94aa68fd5d1783ff126d3184820d17b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?=
Date: Tue, 12 Dec 2023 14:03:17 +0100
Subject: [PATCH 39/90] test regen

---
 .../cdf_data_pipeline_asset_valhall.yaml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml
index 6e3e53901..2b189bf86 100644
--- a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml
+++ b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml
@@ -87,7 +87,6 @@ Group:
     sourceId:
 Transformation:
 - conflictMode: upsert
-  dataSetId: 999
   destination:
     type: asset_hierarchy
   destinationOidcCredentials:

From ac0826d9ce9a8a9593dbf38461d55c0d1bf74bfc Mon Sep 17 00:00:00 2001
From: Anders Albert <60234212+doctrino@users.noreply.github.com>
Date: Tue, 12 Dec 2023 14:10:53 +0100
Subject: [PATCH 40/90] Fix Group Creation Bug (#212)

* fix: all scope last
* build: changelog
---
 CHANGELOG.cdf-tk.md | 2 ++
 cognite_toolkit/cdf.py | 2 +-
 cognite_toolkit/cdf_tk/load.py | 24 +++++++++++++++---------
 3 files changed, 18 insertions(+), 10 deletions(-)

diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md
index 8d7c9653e..19c92d677 100644
--- a/CHANGELOG.cdf-tk.md
+++ b/CHANGELOG.cdf-tk.md
@@ -36,6 +36,8 @@ Changes are grouped as follows:
 ### Fixed
 - When running `cdf-tk deploy` with `--dry-run` a `ValueError` was raised if not all datasets were pre-existing.
   This is now fixed by skipping dataset validation when running with `--dry-run`.
+- When having an `auth` group with mixed capabilities of all scoped and resource scoped, the all scoped capabilities
+  were not removed when running `cdf-tk deploy`. This is now fixed.
## [0.1.0a3] - 2023-12-01 diff --git a/cognite_toolkit/cdf.py b/cognite_toolkit/cdf.py index 84ca0b882..7a73692b6 100755 --- a/cognite_toolkit/cdf.py +++ b/cognite_toolkit/cdf.py @@ -423,7 +423,7 @@ def clean( exit(1) if "auth" in include and (directory := (Path(build_dir) / "auth")).is_dir(): result = deploy_or_clean_resources( - AuthLoader.create_loader(ToolGlobals, target_scopes="all_scoped_skipped_validation"), + AuthLoader.create_loader(ToolGlobals, target_scopes="all"), directory, ToolGlobals, drop=True, diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 7b0e74a0f..9ae616afb 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -367,11 +367,11 @@ def __init__( ] = "all", ): super().__init__(client, ToolGlobals) - self.load = target_scopes + self.target_scopes = target_scopes @property def display_name(self): - if self.load.startswith("all"): + if self.target_scopes.startswith("all"): scope = "all" else: scope = "resource scoped" @@ -412,7 +412,10 @@ def load_resource(self, filepath: Path, dry_run: bool) -> Group: for capability in raw.get("capabilities", []): for _, values in capability.items(): if len(values.get("scope", {}).get("datasetScope", {}).get("ids", [])) > 0: - if not dry_run and self.load not in ["all_skipped_validation", "all_scoped_skipped_validation"]: + if not dry_run and self.target_scopes not in [ + "all_skipped_validation", + "all_scoped_skipped_validation", + ]: values["scope"]["datasetScope"]["ids"] = [ self.ToolGlobals.verify_dataset(ext_id) for ext_id in values.get("scope", {}).get("datasetScope", {}).get("ids", []) @@ -421,7 +424,10 @@ def load_resource(self, filepath: Path, dry_run: bool) -> Group: values["scope"]["datasetScope"]["ids"] = [-1] if len(values.get("scope", {}).get("extractionPipelineScope", {}).get("ids", [])) > 0: - if not dry_run and self.load not in ["all_skipped_validation", "all_scoped_skipped_validation"]: + if not dry_run and self.target_scopes not in [ + "all_skipped_validation", + "all_scoped_skipped_validation", + ]: values["scope"]["extractionPipelineScope"]["ids"] = [ self.ToolGlobals.verify_extraction_pipeline(ext_id) for ext_id in values.get("scope", {}).get("extractionPipelineScope", {}).get("ids", []) @@ -464,11 +470,11 @@ def delete(self, ids: Sequence[int], drop_data: bool) -> int: return len(found) def create(self, items: Sequence[Group], drop: bool, filepath: Path) -> GroupList: - if self.load == "all": + if self.target_scopes == "all": to_create = items - elif self.load == "all_skipped_validation": + elif self.target_scopes == "all_skipped_validation": raise ValueError("all_skipped_validation is not supported for group creation as scopes would be wrong.") - elif self.load == "resource_scoped_only": + elif self.target_scopes == "resource_scoped_only": to_create = [] for item in items: item.capabilities = [ @@ -476,7 +482,7 @@ def create(self, items: Sequence[Group], drop: bool, filepath: Path) -> GroupLis ] if item.capabilities: to_create.append(item) - elif self.load == "all_scoped_only" or self.load == "all_scoped_skipped_validation": + elif self.target_scopes == "all_scoped_only" or self.target_scopes == "all_scoped_skipped_validation": to_create = [] for item in items: item.capabilities = [ @@ -485,7 +491,7 @@ def create(self, items: Sequence[Group], drop: bool, filepath: Path) -> GroupLis if item.capabilities: to_create.append(item) else: - raise ValueError(f"Invalid load value {self.load}") + raise ValueError(f"Invalid load value {self.target_scopes}") if 
len(to_create) == 0: return [] From 70a683866d766daa72407b7d96656590150aafd0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 16:30:46 +0100 Subject: [PATCH 41/90] Update cognite_toolkit/cdf_tk/load.py Co-authored-by: Anders Albert <60234212+doctrino@users.noreply.github.com> --- cognite_toolkit/cdf_tk/load.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 83eb5d43d..bc024fde2 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -808,7 +808,7 @@ class ExtractionPipelineLoader(Loader[str, ExtractionPipeline, ExtractionPipelin support_drop = True api_name = "extraction_pipelines" folder_name = "extraction_pipelines" - filename_pattern = r"^(?:(?!\.config).)*$" # Matches all yaml files except config.yaml + filename_pattern = r"^(?:(?!\.config).)*$" # Matches all yaml files except file names that ends with config.yaml resource_cls = ExtractionPipeline list_cls = ExtractionPipelineList dependencies = frozenset({DataSetsLoader, RawLoader}) From 37b25fa33219566a9a34c2b231d0fd48776c0830 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 16:30:56 +0100 Subject: [PATCH 42/90] Update cognite_toolkit/cdf_tk/load.py Co-authored-by: Anders Albert <60234212+doctrino@users.noreply.github.com> --- cognite_toolkit/cdf_tk/load.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index bc024fde2..7c62cf7ed 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -854,7 +854,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path extractionPipelineList = None try: - extractionPipelineList = ExtractionPipelineList(self.client.extraction_pipelines.create(items)) + extraction_pipelines = self.client.extraction_pipelines.create(items) except CogniteDuplicatedError as e: if len(e.duplicated) < len(items): for dup in e.duplicated: From a167bcfdc92a61625dc183970613fbd3b3554e44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 16:31:05 +0100 Subject: [PATCH 43/90] Update cognite_toolkit/cdf_tk/load.py Co-authored-by: Anders Albert <60234212+doctrino@users.noreply.github.com> --- cognite_toolkit/cdf_tk/load.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 7c62cf7ed..2dc5d486c 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -867,7 +867,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path except Exception as e: print(f"[bold red]ERROR:[/] Failed to create extraction pipelines.\n{e}") self.ToolGlobals.failed = True - return None + return ExtractionPipelineList([]) file_name = filepath.stem.split(".", 2)[1] config_file_stem = f"{file_name}.config" From 6e183e135ae685eacab80e72005291d365652f0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 16:31:13 +0100 Subject: [PATCH 44/90] Update cognite_toolkit/cdf_tk/load.py Co-authored-by: Anders Albert <60234212+doctrino@users.noreply.github.com> --- cognite_toolkit/cdf_tk/load.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 2dc5d486c..f868c0205 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py 
@@ -863,7 +863,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path if item.external_id == ext_id: items.remove(item) try: - extractionPipelineList = ExtractionPipelineList(self.client.extraction_pipelines.create(items)) + extractionPipelineList = self.client.extraction_pipelines.create(items) except Exception as e: print(f"[bold red]ERROR:[/] Failed to create extraction pipelines.\n{e}") self.ToolGlobals.failed = True From 8e82179283f01ea541ee28c449085a08a1c85fc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 16:31:29 +0100 Subject: [PATCH 45/90] Update cognite_toolkit/cdf_tk/load.py Co-authored-by: Anders Albert <60234212+doctrino@users.noreply.github.com> --- cognite_toolkit/cdf_tk/load.py | 1 - 1 file changed, 1 deletion(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index f868c0205..4d27b366a 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -851,7 +851,6 @@ def load_resource(self, filepath: Path, dry_run: bool) -> ExtractionPipeline: return ExtractionPipeline.load(resource) def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path) -> ExtractionPipelineList: - extractionPipelineList = None try: extraction_pipelines = self.client.extraction_pipelines.create(items) From b528ff20cde8b87bc361fb32011a64b6a76ff35b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 16:31:38 +0100 Subject: [PATCH 46/90] Update cognite_toolkit/cdf_tk/load.py Co-authored-by: Anders Albert <60234212+doctrino@users.noreply.github.com> --- cognite_toolkit/cdf_tk/load.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 4d27b366a..aa912e74c 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -868,7 +868,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path self.ToolGlobals.failed = True return ExtractionPipelineList([]) - file_name = filepath.stem.split(".", 2)[1] + file_name =re.sub(r'^(\d+)\.', "", filepath.stem) config_file_stem = f"{file_name}.config" config_file = next( (file for file in Path(filepath.parent).iterdir() if file.is_file() and config_file_stem in file.name), From 3402ad710ab6aec23ab4af8707ce10e6e41567de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 16:32:14 +0100 Subject: [PATCH 47/90] Update cognite_toolkit/cdf_tk/load.py Co-authored-by: Anders Albert <60234212+doctrino@users.noreply.github.com> --- cognite_toolkit/cdf_tk/load.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index aa912e74c..99de36ea0 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -871,7 +871,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path file_name =re.sub(r'^(\d+)\.', "", filepath.stem) config_file_stem = f"{file_name}.config" config_file = next( - (file for file in Path(filepath.parent).iterdir() if file.is_file() and config_file_stem in file.name), + (file for file in Path(filepath.parent).iterdir() if file.is_file() and file.stem.endswith(config_file_stem)), None, ) From 185aa82826a650012a78fbac342cb648332e10c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Tue, 12 Dec 2023 16:40:17 +0100 Subject: [PATCH 48/90] Updated according to PR review --- 
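To make the review feedback in the commits above concrete: the lookup this patch converges on pairs a pipeline file such as `1.my_pipeline.yaml` with a sibling config file `my_pipeline.config.yaml` by stripping the numeric ordering prefix and matching on the file stem. A standalone sketch of that matching (the function name and example file names are illustrative, not part of the codebase):

    import re
    from pathlib import Path

    def find_config_file(pipeline_file: Path) -> Path | None:
        # Strip an optional numeric ordering prefix, e.g. "1.my_pipeline" -> "my_pipeline".
        file_name = re.sub(r"^(\d+)\.", "", pipeline_file.stem)
        # A sibling "my_pipeline.config.yaml" has the stem "my_pipeline.config".
        config_file_stem = f"{file_name}.config"
        return next(
            (f for f in pipeline_file.parent.iterdir() if f.is_file() and f.stem.endswith(config_file_stem)),
            None,
        )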
cognite_toolkit/cdf_tk/load.py | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py
index 99de36ea0..683d65386 100644
--- a/cognite_toolkit/cdf_tk/load.py
+++ b/cognite_toolkit/cdf_tk/load.py
@@ -808,7 +808,7 @@ class ExtractionPipelineLoader(Loader[str, ExtractionPipeline, ExtractionPipelin
     support_drop = True
     api_name = "extraction_pipelines"
     folder_name = "extraction_pipelines"
-    filename_pattern = r"^(?:(?!\.config).)*$"  # Matches all yaml files except file names that ends with config.yaml
+    filename_pattern = r"^(?:(?!\.config).)*$"  # Matches all yaml files except file names whose stem contains *.config.
     resource_cls = ExtractionPipeline
     list_cls = ExtractionPipelineList
     dependencies = frozenset({DataSetsLoader, RawLoader})
@@ -851,7 +851,6 @@ def load_resource(self, filepath: Path, dry_run: bool) -> ExtractionPipeline:
         return ExtractionPipeline.load(resource)

     def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path) -> ExtractionPipelineList:
-
         try:
             extraction_pipelines = self.client.extraction_pipelines.create(items)
         except CogniteDuplicatedError as e:
@@ -862,16 +861,20 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path
                 if item.external_id == ext_id:
                     items.remove(item)
             try:
                 extraction_pipelines = self.client.extraction_pipelines.create(items)
             except Exception as e:
                 print(f"[bold red]ERROR:[/] Failed to create extraction pipelines.\n{e}")
                 self.ToolGlobals.failed = True
                 return ExtractionPipelineList([])

-        file_name =re.sub(r'^(\d+)\.', "", filepath.stem)
+        file_name = re.sub(r"^(\d+)\.", "", filepath.stem)
         config_file_stem = f"{file_name}.config"
         config_file = next(
-            (file for file in Path(filepath.parent).iterdir() if file.is_file() and file.stem.endswith(config_file_stem)),
+            (
+                file
+                for file in Path(filepath.parent).iterdir()
+                if file.is_file() and file.stem.endswith(config_file_stem)
+            ),
             None,
         )

@@ -879,7 +882,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path
             print(
                 f"  [bold yellow]WARNING:[/] no config file for extraction pipeline found. Expected to find {config_file_stem} in same folder as {file_name}"
            )
-            return extractionPipelineList
+            return extraction_pipelines

         resources = load_yaml_inject_variables(config_file, {})
         resources = [resources] if isinstance(resources, dict) else resources
@@ -901,7 +904,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path
             print(f"[bold red]ERROR:[/] Failed to create extraction pipeline config.\n{e}")
             self.ToolGlobals.failed = True

-        return extractionPipelineList
+        return extraction_pipelines


 @final

From 6825d4432abe2ddfc1e3fe8d011d4c3ba4e28343 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?=
Date: Tue, 12 Dec 2023 20:00:29 +0100
Subject: [PATCH 49/90] Fixing logic error

---
 cognite_toolkit/cdf_tk/load.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py
index 683d65386..bedd38b51 100644
--- a/cognite_toolkit/cdf_tk/load.py
+++ b/cognite_toolkit/cdf_tk/load.py
@@ -892,9 +892,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path
                 {
                     "externalId": resource.get("externalId"),
                     "description": resource.get("description"),
-                    "config": yaml.dump(resource.get("config", ""), indent=4)
-                    if config_file.suffix == ".yaml"
-                    else str(resource.get("config", "")),
+                    "config": yaml.dump(resource.get("config", ""), indent=4),
                 }
             )
             try:

From d68ccdc0c0e79ed3d486bc0ca18cd9b7d5ffbf61 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?=
Date: Tue, 12 Dec 2023 21:10:18 +0100
Subject: [PATCH 50/90] Fixing style issue

---
 cognite_toolkit/cdf_tk/load.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py
index bedd38b51..1156fa13e 100644
--- a/cognite_toolkit/cdf_tk/load.py
+++ b/cognite_toolkit/cdf_tk/load.py
@@ -888,7 +888,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path
         resources = [resources] if isinstance(resources, dict) else resources

         for resource in resources:
-            extractionPipelineConfig = ExtractionPipelineConfig.load(
+            extraction_pipeline_config = ExtractionPipelineConfig.load(
                 {
                     "externalId": resource.get("externalId"),
                     "description": resource.get("description"),
@@ -896,7 +896,7 @@ def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path
                 }
             )
             try:
-                self.client.extraction_pipelines.config.create(extractionPipelineConfig)
+                self.client.extraction_pipelines.config.create(extraction_pipeline_config)

             except Exception as e:
                 print(f"[bold red]ERROR:[/] Failed to create extraction pipeline config.\n{e}")

From 17fec69ef362f7275161f282ee3fab170a45c2ab Mon Sep 17 00:00:00 2001
From: Anders Albert <60234212+doctrino@users.noreply.github.com>
Date: Wed, 13 Dec 2023 07:00:29 +0100
Subject: [PATCH 51/90] Validation resource type (#214)

fix: build custom_module
---
 cognite_toolkit/cdf_tk/templates.py | 30 ++++++++++++++++++++---------
 1 file changed, 21 insertions(+), 9 deletions(-)

diff --git a/cognite_toolkit/cdf_tk/templates.py b/cognite_toolkit/cdf_tk/templates.py
index 3215c0bdb..c48e3e76e 100644
--- a/cognite_toolkit/cdf_tk/templates.py
+++ b/cognite_toolkit/cdf_tk/templates.py
@@ -205,8 +205,10 @@ def _get_modules_and_packages(environment_file: Path, build_env: str) -> list[st

 def _read_packages(source_module, verbose):
-    cdf_modules_by_packages = read_yaml_file(source_module / DEFAULT_PACKAGES_FILE).get("packages", {})
-    if (package_path := source_module / PACKAGES_FILE).exists():
+    cdf_modules_by_packages = read_yaml_file(source_module / COGNITE_MODULES / DEFAULT_PACKAGES_FILE).get(
+        "packages", {}
+    )
+    if (package_path := source_module / COGNITE_MODULES / PACKAGES_FILE).exists():
         local_modules_by_packages = read_yaml_file(package_path).get("packages", {})
         if overwrites := set(cdf_modules_by_packages.keys()) & set(local_modules_by_packages.keys()):
             print(
@@ -484,9 +486,8 @@ def build_config(
         print("  [bold yellow]WARNING:[/] Build directory is not empty. Use --clean to remove existing files.")
     else:
         build_dir.mkdir()
-    source_module_dir = source_dir / COGNITE_MODULES
-    selected_modules = get_selected_modules(source_module_dir, environment_file, build_env, verbose)
+    selected_modules = get_selected_modules(source_dir, environment_file, build_env, verbose)

     config = read_yaml_file(config_file)
     warnings = validate_config_yaml(config, config_file)
@@ -494,7 +495,7 @@
         print("  [bold yellow]WARNING:[/] Found the following warnings in config.yaml:")
         for warning in warnings:
             print(f"    {warning}")
-    process_config_files(source_module_dir, selected_modules, build_dir, config, build_env, verbose)
+    process_config_files(source_dir, selected_modules, build_dir, config, build_env, verbose)


 def generate_config(
@@ -716,9 +717,13 @@ def iterate_modules(root_dir: Path) -> tuple[Path, list[Path]]:
         if not module_dir.is_dir():
             continue
         module_directories = [path for path in module_dir.iterdir() if path.is_dir()]
-        is_all_resource_directories = all(dir.name in LOADER_BY_FOLDER_NAME for dir in module_directories)
-        if module_directories and is_all_resource_directories:
-            yield module_dir, [path for path in module_dir.rglob("*") if path.is_file() and path.name not in EXCL_FILES]
+        is_any_resource_directories = any(dir.name in LOADER_BY_FOLDER_NAME for dir in module_directories)
+        if module_directories and is_any_resource_directories:
+            yield module_dir, [
+                path
+                for path in module_dir.rglob("*")
+                if path.is_file() and path.name not in EXCL_FILES and path.parent != module_dir
+            ]


 def create_local_config(config: dict[str, Any], module_dir: Path) -> Mapping[str, str]:
@@ -795,12 +800,19 @@ def validate(content: str, destination: Path, source_path: Path) -> None:
             filepath_build=destination,
         ):
             exit(1)
-    loader = LOADER_BY_FOLDER_NAME.get(destination.parent.name)
+    loader = LOADER_BY_FOLDER_NAME.get(destination.parent.name, [])
     if len(loader) == 1:
         loader = loader[0]
     else:
         loader = next((loader for loader in loader if re.match(loader.filename_pattern, destination.stem)), None)

+    if loader is None:
+        print(
+            f"  [bold yellow]WARNING:[/] In module {source_path.parent.parent.name!r}, the resource {destination.parent.name!r} is not supported by the toolkit."
+        )
+        print(f"  Available resources are: {', '.join(LOADER_BY_FOLDER_NAME.keys())}")
+        return
+
     if loader:
         load_warnings = validate_case_raw(
             parsed, loader.resource_cls, destination, identifier_key=loader.identifier_key

From 8875e363488e624f7e23eae06bd9f5be9bd186b1 Mon Sep 17 00:00:00 2001
From: Anders Albert <60234212+doctrino@users.noreply.github.com>
Date: Wed, 13 Dec 2023 09:29:07 +0100
Subject: [PATCH 52/90] Pretty Config File (#213)

* refactor: reorder config
* refactor: Added some comments
* refactor: restructure generate config
* build: removed ruamel-yaml
* tests: adding failing test
* refactor: extract and dump with comments
* tests: updated test data
* refactor: Robustifying (allow .
in module names) --- cognite_toolkit/cdf_tk/templates.py | 149 +++++++++++++----- .../cognite_modules/default.config.yaml | 8 +- cognite_toolkit/config.yaml | 126 ++++++++------- .../my_example_module/default.config.yaml | 2 +- poetry.lock | 71 +-------- pyproject.toml | 1 - tests/test_cdf_tk/test_templates.py | 74 ++++++++- 7 files changed, 259 insertions(+), 172 deletions(-) diff --git a/cognite_toolkit/cdf_tk/templates.py b/cognite_toolkit/cdf_tk/templates.py index c48e3e76e..26ac48242 100644 --- a/cognite_toolkit/cdf_tk/templates.py +++ b/cognite_toolkit/cdf_tk/templates.py @@ -1,6 +1,5 @@ from __future__ import annotations -import io import itertools import os import re @@ -13,7 +12,6 @@ import yaml from rich import print -from ruamel.yaml import YAML, CommentedMap from cognite_toolkit.cdf_tk.load import LOADER_BY_FOLDER_NAME from cognite_toolkit.cdf_tk.utils import validate_case_raw, validate_config_yaml, validate_data_set_is_set @@ -513,8 +511,6 @@ def generate_config( Returns: A config dictionary. """ - yaml_loader = YAML() - config = (existing_config and yaml_loader.load(existing_config)) or CommentedMap() if not directory.exists(): raise ValueError(f"Directory {directory} does not exist") entries = ConfigEntries((existing_config and yaml.safe_load(existing_config)) or None) @@ -522,20 +518,27 @@ def generate_config( directories = [directory] else: directories = directory - + config = {} + comments: dict[str, dict[Literal["above", "after"], list[str]]] = {} for dir_ in directories: defaults = sorted(directory.glob(f"**/{DEFAULT_CONFIG_FILE}"), key=lambda f: f.relative_to(dir_)) for default_config in defaults: if include_modules is not None and default_config.parent.name not in include_modules: continue - file_data = yaml_loader.load(default_config.read_text()) + raw_file = default_config.read_text() + + comments.update( + _extract_comments(raw_file, key_prefix=tuple(default_config.parent.relative_to(directory).parts)) + ) + + file_data = yaml.safe_load(raw_file) parts = default_config.relative_to(directory).parent.parts if len(parts) == 0: # This is a root config file - for key, value in file_data.items(): - config[key] = value - entries.append( + config.update(file_data) + entries.extend( + [ ConfigEntry( key=key, module="", @@ -543,17 +546,20 @@ def generate_config( last_value=None, current_value=value, ) - ) + for key, value in file_data.items() + ] + ) continue local_config = config for key in parts: if key not in local_config: - local_config[key] = CommentedMap() + local_config[key] = {} local_config = local_config[key] - for key, value in file_data.items(): - local_config[key] = value - entries.append( + local_config.update(file_data) + + entries.extend( + [ ConfigEntry( key=key, module=default_config.parent.name, @@ -561,34 +567,95 @@ def generate_config( last_value=None, current_value=value, ) - ) - for removed in entries.removed: - parts = removed.path.split(".") - parts.append(removed.module) - local_config = config - last_config = None - for key in parts: - last_config = local_config - local_config = local_config[key] - del local_config[removed.key] - if not local_config: - del last_config[removed.module] - - output = io.StringIO() - yaml_loader.dump(config, output) - output_yaml = output.getvalue() - # Indent comments - output_lines = [] - leading_spaces = 0 - for line in output_yaml.splitlines(): - if line.lstrip().startswith("#"): - line = f"{' '*leading_spaces}{line}" - else: - leading_spaces = len(line) - len(line.lstrip()) - output_lines.append(line) + for key, 
value in file_data.items() + ] + ) + + config = _reorder_config_yaml(config) + output_yaml = _dump_yaml_with_comments(config, comments) return output_yaml, entries +def _reorder_config_yaml(config: dict[str, Any]) -> dict[str, Any]: + """Reorder the config.yaml file to have the keys in alphabetical order + and the variables before the modules. + """ + new_config = {} + for key in sorted([k for k in config.keys() if not isinstance(config[k], dict)]): + new_config[key] = config[key] + for key in sorted([k for k in config.keys() if isinstance(config[k], dict)]): + new_config[key] = _reorder_config_yaml(config[key]) + return new_config + + +def _extract_comments( + raw_file: str, key_prefix: tuple[str, ...] = tuple() +) -> dict[tuple[str, ...], dict[Literal["above", "after"], list[str]]]: + """Extract comments from a raw file and return a dictionary with the comments.""" + comments: dict[tuple[str, ...], dict[Literal["above", "after"], list[str]]] = defaultdict( + lambda: {"above": [], "after": []} + ) + position: Literal["above", "after"] + variable: str | None = None + last_comment: str | None = None + for line in raw_file.splitlines(): + if ":" in line: + variable = str(line.split(":", maxsplit=1)[0].strip()) + if last_comment: + comments[(*key_prefix, variable)]["above"].append(last_comment) + last_comment = None + if "#" in line: + before, comment = str(line).rsplit("#", maxsplit=1) + position = "after" if ":" in before else "above" + if position == "after" and (before.count('"') % 2 == 1 or before.count("'") % 2 == 1): + # The comment is inside a string + continue + if position == "after" or variable is None: + key = (*key_prefix, *((variable and [variable]) or [])) + comments[key][position].append(comment.strip()) + else: + last_comment = comment.strip() + return dict(comments) + + +def _dump_yaml_with_comments( + config: dict[str, Any], + comments: dict[tuple[str, ...], dict[Literal["above", "after"], list[str]]], + indent_size: int = 2, +) -> str: + """Dump a config dictionary to a yaml string""" + dumped = yaml.dump(config, sort_keys=False, indent=indent_size) + out_lines = [] + if module_comment := comments.get(tuple()): + for comment in module_comment["above"]: + out_lines.append(f"# {comment}") + last_indent = 0 + last_variable: str | None = None + path: tuple[str, ...] 
= tuple()
+    for line in dumped.splitlines():
+        indent = len(line) - len(line.lstrip())
+        if last_indent < indent:
+            path = (*path, last_variable)
+        elif last_indent > indent:
+            # Adding some extra space between modules
+            out_lines.append("")
+            indent_reduction_steps = (last_indent - indent) // indent_size
+            path = path[:-indent_reduction_steps]
+
+        variable = line.split(":", maxsplit=1)[0].strip()
+        if comment := comments.get((*path, variable)):
+            for line_comment in comment["above"]:
+                out_lines.append(f"{' ' * indent}# {line_comment}")
+            if after := comment["after"]:
+                line = f"{line} # {after[0]}"
+
+        out_lines.append(line)
+        last_indent = indent
+        last_variable = variable
+    out_lines.append("")
+    return "\n".join(out_lines)
+
+
 @dataclass
 class ConfigEntries(UserList):
     def __init__(self, entries: list[ConfigEntry] | dict | None = None):
@@ -631,6 +698,10 @@ def append(self, item: ConfigEntry) -> None:
         else:
             self._lookup[item.module][item.key].current_value = item.current_value

+    def extend(self, items: list[ConfigEntry]) -> None:
+        for item in items:
+            self.append(item)
+
     @property
     def changed(self) -> list[ConfigEntry]:
         return [entry for entry in self if entry.changed]
diff --git a/cognite_toolkit/cognite_modules/default.config.yaml b/cognite_toolkit/cognite_modules/default.config.yaml
index 01e39e685..65e1ca5cb 100644
--- a/cognite_toolkit/cognite_modules/default.config.yaml
+++ b/cognite_toolkit/cognite_modules/default.config.yaml
@@ -1,13 +1,11 @@
-# DO NOT EDIT THIS FILE!
-# You can override these variables by creating a config.yaml file in the same directory as this file.
-#
-# Globally available variables that can be used by using {{variable_name}} in your yaml files.
+# Global variables are available to all submodules
+# of the cognite_modules.
 cdf_cluster: ${CDF_CLUSTER}
 cicd_clientId: ${IDP_CLIENT_ID}
 cicd_clientSecret: ${IDP_CLIENT_SECRET}
 cicd_tokenUri: ${IDP_TOKEN_URL}
-# Optional: If idP requires providing the scopes
 cdfProjectName: ${CDF_PROJECT}
+# Optional: If idP requires providing the scopes
 cicd_scopes:
   - ${IDP_SCOPES}
 # Optional: If idP requires providing the audience
diff --git a/cognite_toolkit/config.yaml b/cognite_toolkit/config.yaml
index d66b6f959..43f3ce4b4 100644
--- a/cognite_toolkit/config.yaml
+++ b/cognite_toolkit/config.yaml
@@ -1,79 +1,114 @@
+# Global variables are available to all submodules
+# of the cognite_modules.
 cognite_modules:
+  cdfProjectName: ${CDF_PROJECT}
+  cdf_cluster: ${CDF_CLUSTER}
+  # Optional: If idP requires providing the audience
+  cicd_audience: ${IDP_AUDIENCE}
+  cicd_clientId: ${IDP_CLIENT_ID}
+  cicd_clientSecret: ${IDP_CLIENT_SECRET}
+  # Optional: If idP requires providing the scopes
+  cicd_scopes:
+  - ${IDP_SCOPES}
+  cicd_tokenUri: ${IDP_TOKEN_URL}
+  # Daily at 1:35 AM
+  scheduleDaily: 35 1 * * *
+  # Seven minutes past each hour
+  scheduleHourly: 7 * * * *
+  # Every fifteen minutes
+  scheduleQuarterly: 0/15 * * * *
   common:
     cdf_auth_readwrite_all:
-      readwrite_source_id:
       readonly_source_id:
+      readwrite_source_id:
+
+  core:
+    cdf_apm_base:
+      apm_datamodel_space: APM_SourceData
+      apm_datamodel_version: '1'
+
   examples:
-    cdf_apm_simple_data_model:
       # Values here are only valid for this module.
       # The raw database values here point to the RAW data loaded in the cdf_oid_example_data
      # module. If you have changed the default values in that module, you need to change them here as well.
+ cdf_apm_simple_data_model: + datamodel: apm_simple + datamodel_version: '1' default_location: oid + pause_transformations: true source_asset: workmate - source_workorder: workmate source_timeseries: pi - datamodel: apm_simple + source_workorder: workmate space: apm_simple - datamodel_version: '1' view_Asset_version: '1' - view_WorkOrder_version: '1' view_WorkItem_version: '1' - pause_transformations: true - cdf_oid_example_data: + view_WorkOrder_version: '1' + # Only valid for this module, loads template variables from environment - # - # In the example below we are setting up a project based on the Open Industry Data (OID), + # + # In the example below we are setting up a project based on the Open Industry Data (OID), # that originates from the Valhall oil rig. Note that the location/site is NOT used # to structure the data when on-boarding. The expectation is that a single source system # and it's data pipeline may supply data for multiple locations/sites. # The structuring of the data based on site/location should happen as part of processing # the data in CDF, i.e. contextualisation. - # + # # Each of data resource types have assigned the source system where the data originates from. # This information will be used to construct RAW database names, and to create data sets in CDF, # and can be used to control access. + cdf_oid_example_data: default_location: oid source_asset: workmate - source_workorder: workmate source_files: fileshare source_timeseries: pi - example_pump_asset_hierarchy: + source_workorder: workmate + # Only valid for this module, loads template variables from environment - raw_db: pump_assets + example_pump_asset_hierarchy: data_set: src:lift_pump_stations + raw_db: pump_assets + experimental: - cdf_asset_source_model: # Only valid for this module, loads template variables from environment - model_space: ExtendedSourceDataModels - instance_space: cdfTemplateInstances - view_asset_version: '1' + cdf_asset_source_model: data_model_version: '1' + instance_space: cdfTemplateInstances + model_space: ExtendedSourceDataModels root_asset_external_id: lift_pump_stations:root - example_pump_data_model: + view_asset_version: '1' + # Only valid for this module, loads template variables from environment - model_space: pumpModelSpace + example_pump_data_model: + data_model: PumpLiftStations + data_model_version: '1' instance_space: pumpInstanceSpace - source_model_space: ExtendedSourceDataModels + model_space: pumpModelSpace source_model: ExtendedSourceData - view_Pump_version: '1' + source_model_space: ExtendedSourceDataModels view_LiftStation_version: '1' - data_model_version: '1' - data_model: PumpLiftStations + view_Pump_version: '1' + infield: cdf_infield_common: applicationsconfiguration_source_id: - cdf_infield_location: + # This default_location points to the location created by the cdf_oid_example_data module. # When you create your own location by copying the cdf_oid_example_data module to # set up data sets and groups, the below needs to refer to the location to define. 
- # - default_location: oid - module_version: '1' - apm_datamodel_space: APM_SourceData + # + cdf_infield_location: apm_app_config_external_id: default-infield-config-minimal apm_config_instance_space: APM_Config - # RAW databases to load workorders and other workorder data from - # The below values point to the RAW database in the cdf_oid_example_data and should be + apm_datamodel_space: APM_SourceData + default_location: oid + # infield and must be updated for each location + infield_default_location_checklist_admin_users_source_id: + infield_default_location_normal_users_source_id: + infield_default_location_template_admin_users_source_id: + infield_default_location_viewer_users_source_id: + module_version: '1' + # the root asset for this location, needs to be updated for each location + root_asset_external_id: WMT:VAL # changed if you want to load workorders from another RAW database. source_asset: workmate source_workorder: workmate @@ -81,33 +116,6 @@ cognite_modules: # The table name in the raw_db database that has workorder data workorder_table_name: workorders -# the root asset for this location, needs to be updated for each location - root_asset_external_id: WMT:VAL - -# the following properties are required for -# infield and must be updated for each location - infield_default_location_checklist_admin_users_source_id: - infield_default_location_normal_users_source_id: - infield_default_location_template_admin_users_source_id: - infield_default_location_viewer_users_source_id: - -# Transformation credentials - cdf_cluster: ${CDF_CLUSTER} - cicd_clientId: ${IDP_CLIENT_ID} - cicd_clientSecret: ${IDP_CLIENT_SECRET} - cicd_tokenUri: ${IDP_TOKEN_URL} - cdfProjectName: ${CDF_PROJECT} - cicd_scopes: - - ${IDP_SCOPES} -# Optional: If idP requires providing the audience - cicd_audience: ${IDP_AUDIENCE} - core: - cdf_apm_base: - apm_datamodel_space: APM_SourceData - apm_datamodel_version: '1' - scheduleHourly: 7 * * * * - scheduleQuarterly: 0/15 * * * * - scheduleDaily: 35 1 * * * custom_modules: my_example_module: - example_variable: demo_dataset + example_variable: demo_dataset # This is the variable that will be used in the module diff --git a/cognite_toolkit/custom_modules/my_example_module/default.config.yaml b/cognite_toolkit/custom_modules/my_example_module/default.config.yaml index 861221be8..581f713e3 100644 --- a/cognite_toolkit/custom_modules/my_example_module/default.config.yaml +++ b/cognite_toolkit/custom_modules/my_example_module/default.config.yaml @@ -1 +1 @@ -example_variable: 'demo_dataset' +example_variable: 'demo_dataset' # This is the variable that will be used in the module diff --git a/poetry.lock b/poetry.lock index 572f5f846..ec08178ad 100644 --- a/poetry.lock +++ b/poetry.lock @@ -212,13 +212,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "cognite-sdk" -version = "7.5.4" +version = "7.5.6" description = "Cognite Python SDK" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "cognite_sdk-7.5.4-py3-none-any.whl", hash = "sha256:7c9f87ff81565e284630c9b6b7c8b3c642a98221f89a9b4894e53cf7bb5f9ca8"}, - {file = "cognite_sdk-7.5.4.tar.gz", hash = "sha256:489a121278c68c27993f4e610577e52d93f1028a30ce3e9c05f5c9f89568b196"}, + {file = "cognite_sdk-7.5.6-py3-none-any.whl", hash = "sha256:11f0a5bc83e9a753386ef3d381c23291e9c33447afd21d2535da1daf6a5e95e8"}, + {file = "cognite_sdk-7.5.6.tar.gz", hash = "sha256:529d009831af63d99f1a835e67509e280280e2233dd8c4ceb3f0e8e2e8d79801"}, ] [package.dependencies] @@ -312,13 
+312,13 @@ dev = ["black", "coveralls", "mypy", "pre-commit", "pylint", "pytest (>=5)", "py [[package]] name = "distlib" -version = "0.3.7" +version = "0.3.8" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] [[package]] @@ -1361,63 +1361,6 @@ pygments = ">=2.13.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] -[[package]] -name = "ruamel-yaml" -version = "0.18.5" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruamel.yaml-0.18.5-py3-none-any.whl", hash = "sha256:a013ac02f99a69cdd6277d9664689eb1acba07069f912823177c5eced21a6ada"}, - {file = "ruamel.yaml-0.18.5.tar.gz", hash = "sha256:61917e3a35a569c1133a8f772e1226961bf5a1198bea7e23f06a0841dea1ab0e"}, -] - -[package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} - -[package.extras] -docs = ["mercurial (>5.7)", "ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = "ruamel-yaml-clib" -version = "0.2.8" -description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -optional = false -python-versions = ">=3.6" -files = [ - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, - {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, - {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, -] - [[package]] name = "secretstorage" version = "3.3.3" @@ -1626,4 +1569,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "53b6797631963ab474c52c78f19733be14b6a12eb9ac5763f5066cfcfa332050" +content-hash = "396847f137a1189d4b19d6867fee9bb90bf984866586848499777ade8450b28e" diff --git a/pyproject.toml b/pyproject.toml index be2a5ba1c..8b6a82319 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,6 @@ regex = "^2023.6.3" 
chardet = "^5.1.0" typer = {version = "^0.9.0", extras = ["all"]} pytest-icdiff = "*" # Used for better diffs in pytest -'ruamel.yaml' = "^0.18" # Used for perserving comments in yaml files [tool.poetry.group.dev.dependencies] mypy = "^1.7.1" diff --git a/tests/test_cdf_tk/test_templates.py b/tests/test_cdf_tk/test_templates.py index bf7171da5..5e894783f 100644 --- a/tests/test_cdf_tk/test_templates.py +++ b/tests/test_cdf_tk/test_templates.py @@ -8,6 +8,8 @@ from cognite_toolkit.cdf_tk.templates import ( COGNITE_MODULES, + _dump_yaml_with_comments, + _extract_comments, create_local_config, generate_config, split_config, @@ -36,7 +38,7 @@ def generate_config_test_cases(): }, } - yield pytest.param(yaml.safe_dump(expected, sort_keys=False), None, id="Include all") + yield pytest.param(expected, None, id="Include all") only_a_module = { COGNITE_MODULES: { @@ -46,7 +48,7 @@ def generate_config_test_cases(): }, } } - yield pytest.param(yaml.safe_dump(only_a_module, sort_keys=False), {"a_module"}, id="Include one module") + yield pytest.param(only_a_module, {"a_module"}, id="Include one module") @pytest.mark.parametrize( @@ -56,7 +58,7 @@ def generate_config_test_cases(): def test_generate_config(expected: str, include: set[str] | None) -> None: actual, _ = generate_config(BUILD_CONFIG, include_modules=include) - assert actual == expected + assert yaml.safe_load(actual) == expected @pytest.fixture() @@ -91,3 +93,69 @@ def test_create_local_config(my_config: dict[str, Any]): local_config = create_local_config(configs, Path("parent/child/auth/")) assert dict(local_config.items()) == {"top_variable": "my_top_variable", "child_variable": "my_child_variable"} + + +@pytest.mark.parametrize( + "raw_file, key_prefix, expected_comments", + [ + pytest.param( + """# This is a module comment +variable: value # After variable comment +# Before variable comment +variable2: value2 +variable3: 'value with #in it' +variable4: "value with #in it" # But a comment after +""", + tuple("super_module.module_a".split(".")), + { + ("super_module", "module_a"): {"above": ["This is a module comment"], "after": []}, + ("super_module", "module_a", "variable"): {"above": [], "after": ["After variable comment"]}, + ("super_module", "module_a", "variable2"): {"above": ["Before variable comment"], "after": []}, + ("super_module", "module_a", "variable4"): {"above": [], "after": ["But a comment after"]}, + }, + id="module comments", + ) + ], +) +def test_extract_comments(raw_file: str, key_prefix: tuple[str, ...], expected_comments: dict[str, Any]): + actual_comments = _extract_comments(raw_file, key_prefix) + assert actual_comments == expected_comments + + +@pytest.mark.parametrize( + "config, comments, expected", + [ + pytest.param( + { + "top_variable": "my_top_variable", + "module_a": { + "readwrite_source_id": "my_readwrite_source_id", + "readonly_source_id": "my_readonly_source_id", + }, + "parent": {"child": {"child_variable": "my_child_variable"}}, + }, + { + tuple(): {"above": ["This is a module comment"], "after": []}, + ("top_variable",): {"above": [], "after": ["After variable comment"]}, + ("module_a",): {"above": ["Before variable comment"], "after": []}, + ("parent", "child", "child_variable"): {"above": [], "after": ["With a comment after"]}, + }, + """# This is a module comment +top_variable: my_top_variable # After variable comment +# Before variable comment +module_a: + readwrite_source_id: my_readwrite_source_id + readonly_source_id: my_readonly_source_id + +parent: + child: + child_variable: 
my_child_variable # With a comment after +""", + id="Config with comments", + ) + ], +) +def test_dump_yaml_with_comments(config: dict[str, Any], comments: dict[tuple[str, ...], Any], expected: str): + actual = _dump_yaml_with_comments(config, comments) + + assert actual == expected From 24fa904fb0a901a760e84af569e9fdf3c645c6ba Mon Sep 17 00:00:00 2001 From: Greger Wedel Date: Wed, 13 Dec 2023 09:30:37 +0100 Subject: [PATCH 53/90] Fix schedule and clean up use of ticks in yaml, added variable for pausing transformation --- .../cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml | 6 +++--- .../cdf_data_pipeline_asset_valhall/default.config.yaml | 1 + .../tr_asset_oid_workmate_asset_hierarchy.yaml | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml index e368feca6..72623c234 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/data_sets/dataset.yaml @@ -4,10 +4,10 @@ description: 'Asset data for {{location_name}}' metadata: consoleSource: names: - - "{{source_name}}" + - '{{source_name}}' rawTables: - databaseName: asset_{{location_name}}_{{source_name}} - tableName: "assets" + tableName: 'assets' transformations: - externalId: tr_asset_{{location_name}}_{{source_name}}_asset_hierarchy - type: "Transformations" + type: 'Transformations' diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml index 316727a22..8b1ba78bd 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml @@ -19,6 +19,7 @@ asset_location_extractor_group_source_id: asset_location_processing_group_source_id: asset_location_read_group_source_id: +pause_transformations: true # Transformation credentials clientId: ${IDP_CLIENT_ID} diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml index 6e4625550..38c321801 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml @@ -18,5 +18,5 @@ authentication: audience: {{cicd_audience}} schedule: # every hour - interval: '0 * * * *' - isPaused: true \ No newline at end of file + interval: '{{scheduleHourly}}' + isPaused: {{pause_transformations}} \ No newline at end of file From 071fa22c9bb1e4fa93a727c71ca13ab1bb5c5794 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Wed, 13 Dec 2023 11:02:02 +0100 Subject: [PATCH 54/90] Refactor to separate ExtractionPipelineConfigLoader --- cognite_toolkit/cdf_tk/load.py | 72 ++++++++++--------- cognite_toolkit/config.yaml | 1 + cognite_toolkit/environments.yaml | 9 +-- .../cdf_data_pipeline_asset_valhall.yaml | 4 +- 4 files changed, 46 insertions(+), 40 
deletions(-)

diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py
index 1156fa13e..2c5588247 100644
--- a/cognite_toolkit/cdf_tk/load.py
+++ b/cognite_toolkit/cdf_tk/load.py
@@ -852,7 +852,7 @@ def load_resource(self, filepath: Path, dry_run: bool) -> ExtractionPipeline:
 
     def create(self, items: Sequence[ExtractionPipeline], drop: bool, filepath: Path) -> ExtractionPipelineList:
         try:
-            extraction_pipelines = self.client.extraction_pipelines.create(items)
+            return self.client.extraction_pipelines.create(items)
         except CogniteDuplicatedError as e:
             if len(e.duplicated) < len(items):
                 for dup in e.duplicated:
@@ -861,48 +861,56 @@
                     if item.external_id == ext_id:
                         items.remove(item)
             try:
-                extraction_pipelines = self.client.extraction_pipelines.create(items)
+                return self.client.extraction_pipelines.create(items)
             except Exception as e:
                 print(f"[bold red]ERROR:[/] Failed to create extraction pipelines.\n{e}")
                 self.ToolGlobals.failed = True
                 return ExtractionPipelineList([])
 
-        file_name = re.sub(r"^(\d+)\.", "", filepath.stem)
-        config_file_stem = f"{file_name}.config"
-        config_file = next(
-            (
-                file
-                for file in Path(filepath.parent).iterdir()
-                if file.is_file() and file.stem.endswith(config_file_stem)
-            ),
-            None,
-        )
-        if not config_file.exists():
-            print(
-                f"  [bold yellow]WARNING:[/] no config file for extraction pipeline found. Expected to find {config_file_stem} in same folder as {file_name}"
-            )
-            return extraction_pipelines
 
+@final
+class ExtractionPipelineConfigLoader(Loader[str, ExtractionPipelineConfig, list[ExtractionPipelineConfig]]):
+    support_drop = True
+    api_name = "extraction_pipelines.config"
+    folder_name = "extraction_pipelines"
+    filename_pattern = r"^.*\.config$"
+    resource_cls = ExtractionPipelineConfig
+    dependencies = frozenset({ExtractionPipelineLoader})
 
-        resources = load_yaml_inject_variables(config_file, {})
-        resources = [resources] if isinstance(resources, dict) else resources
+    @classmethod
+    def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
+        return ExtractionPipelinesAcl(
+            [ExtractionPipelinesAcl.Action.Read, ExtractionPipelinesAcl.Action.Write],
+            ExtractionPipelinesAcl.Scope.All(),
+        )
 
-        for resource in resources:
-            extraction_pipeline_config = ExtractionPipelineConfig.load(
-                {
-                    "externalId": resource.get("externalId"),
-                    "description": resource.get("description"),
-                    "config": yaml.dump(resource.get("config", ""), indent=4),
-                }
+    def get_id(self, item: ExtractionPipelineConfig) -> str:
+        return item.external_id
+
+    def load_resource(self, filepath: Path, dry_run: bool) -> ExtractionPipelineConfig:
+        resource = load_yaml_inject_variables(filepath, {})
+        try:
+            resource["config"] = yaml.dump(resource.get("config", ""), indent=4)
+        except Exception:
+            print(
+                "[yellow]WARNING:[/] configuration could not be parsed as valid YAML, which is the recommended format.\n"
             )
-            try:
-                self.client.extraction_pipelines.config.create(extraction_pipeline_config)
+            resource["config"] = resource.get("config", "")
+        return ExtractionPipelineConfig.load(resource)
 
-            except Exception as e:
-                print(f"[bold red]ERROR:[/] Failed to create extraction pipeline config.\n{e}")
-                self.ToolGlobals.failed = True
+    def create(
+        self, items: Sequence[ExtractionPipelineConfig], drop: bool, filepath: Path
+    ) -> list[ExtractionPipelineConfig]:
+        try:
+            return [self.client.extraction_pipelines.config.create(items[0])]
+        except Exception as e:
+            print(f"[bold red]ERROR:[/] 
Failed to create extraction pipeline config.\n{e}")
+            self.ToolGlobals.failed = True
+            return []
 
-        return extraction_pipelines
+    def delete(self, ids: Sequence[str], drop_data: bool) -> int:
+        # Note: configs are not deleted here; we only count the existing configs
+        # for the given extraction pipelines.
+        configs = self.client.extraction_pipelines.config.list(external_id=ids)
+        return len(configs)
 
 
 @final
diff --git a/cognite_toolkit/config.yaml b/cognite_toolkit/config.yaml
index 7a1fa4890..5d5c15a57 100644
--- a/cognite_toolkit/config.yaml
+++ b/cognite_toolkit/config.yaml
@@ -57,6 +57,7 @@ cognite_modules:
       cdfProjectName: ${CDF_PROJECT}
       scopes: ${IDP_SCOPES}
       audience: ${IDP_AUDIENCE}
+      pause_transformations: true
     experimental:
       cdf_asset_source_model:
         # Only valid for this module, loads template variables from environment
diff --git a/cognite_toolkit/environments.yaml b/cognite_toolkit/environments.yaml
index b54001b96..946d8b854 100644
--- a/cognite_toolkit/environments.yaml
+++ b/cognite_toolkit/environments.yaml
@@ -24,14 +24,10 @@ demo:
     - cdf_demo_infield
     - cdf_oid_example_data
 local:
-  project:
+  project: trial-572dca111144a5196a6b1
   type: dev
   deploy:
-    - cdf_auth_readwrite_all
-    - cdf_apm_base
-    - cdf_oid_example_data
-    - cdf_infield_common
-    - cdf_infield_location
+    - cdf_data_pipeline_asset_valhall
 dev:
   project:
   type: dev
@@ -48,3 +44,4 @@ prod:
   type: prod
   deploy:
     - cdf_infield
+
diff --git a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml
index 2b189bf86..a1e7bdce2 100644
--- a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml
+++ b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml
@@ -113,7 +113,7 @@ Transformation:
     \ `asset_oid_workmate`.`assets`\n"
   schedule:
     externalId: tr_asset_oid_workmate_asset_hierarchy
-    interval: 0 * * * *
+    interval: 7 * * * *
     isPaused: true
   sourceOidcCredentials:
     audience: ${IDP_AUDIENCE}
@@ -124,7 +124,7 @@ Transformation:
     tokenUri: ${IDP_TOKEN_URL}
 TransformationSchedule:
 - externalId: tr_asset_oid_workmate_asset_hierarchy
-  interval: 0 * * * *
+  interval: 7 * * * *
   isPaused: true
 deleted:
   Transformation:

From d69ddd864b742c04f8e9caabea1f2ba463e9d577 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?=
Date: Wed, 13 Dec 2023 13:28:09 +0100
Subject: [PATCH 55/90] Asset pipeline in demo project

---
 demo/config.yaml       | 23 +++++++++++++++++++++++
 demo/environments.yaml |  1 +
 demo/preproc.py        |  8 ++++++--
 3 files changed, 30 insertions(+), 2 deletions(-)
 mode change 100644 => 100755 demo/preproc.py

diff --git a/demo/config.yaml b/demo/config.yaml
index 4374754cf..3c550de15 100644
--- a/demo/config.yaml
+++ b/demo/config.yaml
@@ -5,3 +5,26 @@ infield_default_location_checklist_admin_users_source_id: 684237e9-c1fd-4d3c-8c5
 infield_default_location_normal_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e
 infield_default_location_template_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
 infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e
+
+cdf_data_pipeline_asset_valhall:
+  asset_dataset: ds_asset_oid
+  # source ID from Azure AD for the corresponding groups, ex 'c74797ce-9191-4a4a-9186-8fe21c54c3de'
+  asset_location_extractor_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
+  asset_location_processing_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
+  asset_location_read_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
+  asset_raw_input_db: asset_oid_workmate
+  asset_raw_input_table: assets
+  # Optional: If idP requires 
providing the audience
+  audience: ${IDP_AUDIENCE}
+  # Optional: If idP requires providing the scopes
+  cdfProjectName: ${CDF_PROJECT}
+  # Transformation credentials
+  clientId: ${IDP_CLIENT_ID}
+  clientSecret: ${IDP_CLIENT_SECRET}
+  location_name: oid
+  module_version: '1'
+  pause_transformations: true
+  scopes: ${IDP_SCOPES}
+  # specify the name of the source making it possible to identify where the data originates from, ex: 'workmate', 'sap', 'oracle',..
+  source_name: workmate
+  tokenUri: ${IDP_TOKEN_URL}
diff --git a/demo/environments.yaml b/demo/environments.yaml
index efc6e204e..c63eebb3d 100644
--- a/demo/environments.yaml
+++ b/demo/environments.yaml
@@ -21,3 +21,4 @@ demo:
   deploy:
     - cdf_demo_infield
     - cdf_oid_example_data
+    - cdf_data_pipeline_asset_valhall
diff --git a/demo/preproc.py b/demo/preproc.py
old mode 100644
new mode 100755
index 63b86fc3a..459e5987f
--- a/demo/preproc.py
+++ b/demo/preproc.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-import shutil
+import os, shutil
 from pathlib import Path
 
 import yaml
@@ -10,14 +10,18 @@
 def run() -> None:
     print("Running copy commands to prep deployment of demo...")
+    os.makedirs(DEMO_PROJECT, exist_ok=True)
     print("Copying my enviroments.yaml to root of repo...")
-    shutil.copy(THIS_FOLDER / "environments.yaml", DEMO_PROJECT)
+    shutil.copy(THIS_FOLDER / "environments.yaml", DEMO_PROJECT / "environments.yaml")
     print("Copying config.yaml into demo project...")
+    shutil.copy(THIS_FOLDER / "config.yaml", DEMO_PROJECT / "config.yaml")
     config_yaml_path = DEMO_PROJECT / "config.yaml"
+
     variables = yaml.safe_load((THIS_FOLDER / "config.yaml").read_text())
     config_yaml = config_yaml_path.read_text()
     for key, value in variables.items():
         config_yaml = config_yaml.replace(f"{key}: ", f"{key}: {value}")
+
     config_yaml_path.write_text(config_yaml)

From 19f46ff5504b69b54b081613d218276e0e6aec54 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?=
Date: Wed, 13 Dec 2023 14:17:40 +0100
Subject: [PATCH 56/90] adjusting demo deploy preprocessing

---
 .../cdf_data_pipeline_asset_valhall/default.config.yaml |  6 ++--
 demo/config.yaml                                        | 30 -------------------
 demo/demo_config.yaml                                   | 12 ++++++++
 demo/preproc.py                                         | 10 +++++--
 .../cdf_data_pipeline_asset_valhall.yaml                |  8 ++---
 5 files changed, 26 insertions(+), 40 deletions(-)
 delete mode 100644 demo/config.yaml
 create mode 100644 demo/demo_config.yaml

diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml
index 8b1ba78bd..c5520b95a 100644
--- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml
+++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/default.config.yaml
@@ -15,9 +15,9 @@ asset_raw_input_db: asset_oid_workmate
 asset_raw_input_table: assets
 
 # source ID from Azure AD for the corresponding groups, ex 'c74797ce-9191-4a4a-9186-8fe21c54c3de'
-asset_location_extractor_group_source_id: 
-asset_location_processing_group_source_id: 
-asset_location_read_group_source_id: 
+asset_location_extractor_group_source_id:
+asset_location_processing_group_source_id:
+asset_location_read_group_source_id:
 
 pause_transformations: true
 
diff --git a/demo/config.yaml b/demo/config.yaml
deleted file mode 100644
index 3c550de15..000000000
--- a/demo/config.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-readwrite_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
-readonly_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e
-applicationsconfiguration_source_id: 
7bdcb20c-3e6a-400c-b9ef-cf835f64f05e -infield_default_location_checklist_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f -infield_default_location_normal_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e -infield_default_location_template_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f -infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e - -cdf_data_pipeline_asset_valhall: - asset_dataset: ds_asset_oid - # source ID from Azure AD for the corresponding groups, ex 'c74797ce-9191-4a4a-9186-8fe21c54c3de' - asset_location_extractor_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f - asset_location_processing_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f - asset_location_read_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f - asset_raw_input_db: asset_oid_workmate - asset_raw_input_table: assets - # Optional: If idP requires providing the audience - audience: ${IDP_AUDIENCE} - # Optional: If idP requires providing the scopes - cdfProjectName: ${CDF_PROJECT} - # Transformation credentials - clientId: ${IDP_CLIENT_ID} - clientSecret: ${IDP_CLIENT_SECRET} - location_name: oid - module_version: '1' - pause_transformations: true - scopes: ${IDP_SCOPES} - # spesify the name of the source making it possible to identify where the data orginates from, ex: 'workmate', 'sap', 'oracle',.. - source_name: workmate - tokenUri: ${IDP_TOKEN_URL} diff --git a/demo/demo_config.yaml b/demo/demo_config.yaml new file mode 100644 index 000000000..dc112cbb4 --- /dev/null +++ b/demo/demo_config.yaml @@ -0,0 +1,12 @@ +readwrite_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f +readonly_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e +applicationsconfiguration_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e +infield_default_location_checklist_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f +infield_default_location_normal_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e +infield_default_location_template_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f +infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e + +#cdf_data_pipeline_asset_valhall: +asset_location_extractor_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f +asset_location_processing_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f +asset_location_read_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f \ No newline at end of file diff --git a/demo/preproc.py b/demo/preproc.py index 459e5987f..ff51296c4 100755 --- a/demo/preproc.py +++ b/demo/preproc.py @@ -6,20 +6,24 @@ THIS_FOLDER = Path(__file__).parent.absolute() DEMO_PROJECT = THIS_FOLDER.parent / "demo_project" - +TOOLKIT_FOLDER = THIS_FOLDER.parent / "cognite_toolkit" def run() -> None: + + print(TOOLKIT_FOLDER) + print("Running copy commands to prep deployment of demo...") os.makedirs(DEMO_PROJECT, exist_ok=True) print("Copying my enviroments.yaml to root of repo...") shutil.copy(THIS_FOLDER / "environments.yaml", DEMO_PROJECT / "environments.yaml") print("Copying config.yaml into demo project...") - shutil.copy(THIS_FOLDER / "config.yaml", DEMO_PROJECT / "config.yaml") + shutil.copy(TOOLKIT_FOLDER / "config.yaml", DEMO_PROJECT / "config.yaml") config_yaml_path = DEMO_PROJECT / "config.yaml" - variables = yaml.safe_load((THIS_FOLDER / "config.yaml").read_text()) + variables = yaml.safe_load((THIS_FOLDER / "demo_config.yaml").read_text()) config_yaml = config_yaml_path.read_text() for key, value in variables.items(): + print(f"updating {key}") config_yaml = 
config_yaml.replace(f"{key}: ", f"{key}: {value}")
 
     config_yaml_path.write_text(config_yaml)
diff --git a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml
index a1e7bdce2..f568c481b 100644
--- a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml
+++ b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml
@@ -29,7 +29,7 @@ Group:
     module_version: '1'
     origin: cdf-project-templates
   name: gp_asset_oid_extractor
-  sourceId: 
+  sourceId:
 - capabilities:
   - transformationsAcl:
       actions:
@@ -48,7 +48,7 @@ Group:
     module_version: '1'
     origin: cdf-project-templates
   name: gp_asset_oid_processing
-  sourceId: 
+  sourceId:
 - capabilities:
   - rawAcl:
       actions:
@@ -71,7 +71,7 @@ Group:
     module_version: '1'
     origin: cdf-project-templates
   name: gp_asset_oid_processing
-  sourceId: 
+  sourceId:
 - capabilities:
   - assetsAcl:
       actions:
@@ -84,7 +84,7 @@ Group:
     module_version: '1'
     origin: cdf-project-templates
   name: gp_asset_oid_read
-  sourceId: 
+  sourceId:
 Transformation:
 - conflictMode: upsert
   destination:

From a3ce5da036744487adf88d89d7dc77940cfb8425 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?=
Date: Wed, 13 Dec 2023 14:23:08 +0100
Subject: [PATCH 57/90] changed from to in some config files

---
 cognite_toolkit/config.yaml | 48 +++++++++++++++++++++++--------------
 1 file changed, 30 insertions(+), 18 deletions(-)

diff --git a/cognite_toolkit/config.yaml b/cognite_toolkit/config.yaml
index 0fea1cfdb..10113c844 100644
--- a/cognite_toolkit/config.yaml
+++ b/cognite_toolkit/config.yaml
@@ -44,6 +44,35 @@ cognite_modules:
       view_WorkItem_version: '1'
       view_WorkOrder_version: '1'
 
+    # This is the configuration file used for the example data from The Open Industrial Data (oid)
+    #
+    # The data originates from a single compressor on Aker BP’s Valhall oil platform
+    # in the North Sea. Aker BP selected the first stage compressor on the Valhall
+    # because it is a subsystem with clearly defined boundaries, rich in time series and maintenance data.
+    # specify the site/asset location where data comes from, ex 'valhall_oid' or if they are generic for all assets use 'all'
+    cdf_data_pipeline_asset_valhall:
+      asset_dataset: ds_asset_oid
+      # source ID from Azure AD for the corresponding groups, ex 'c74797ce-9191-4a4a-9186-8fe21c54c3de'
+      asset_location_extractor_group_source_id:
+      asset_location_processing_group_source_id:
+      asset_location_read_group_source_id:
+      asset_raw_input_db: asset_oid_workmate
+      asset_raw_input_table: assets
+      # Optional: If idP requires providing the audience
+      audience: ${IDP_AUDIENCE}
+      # Optional: If idP requires providing the scopes
+      cdfProjectName: ${CDF_PROJECT}
+      # Transformation credentials
+      clientId: ${IDP_CLIENT_ID}
+      clientSecret: ${IDP_CLIENT_SECRET}
+      location_name: oid
+      module_version: '1'
+      pause_transformations: true
+      scopes: ${IDP_SCOPES}
+      # specify the name of the source making it possible to identify where the data originates from, ex: 'workmate', 'sap', 'oracle',..
+ source_name: workmate + tokenUri: ${IDP_TOKEN_URL} + # Only valid for this module, loads template variables from environment # # In the example below we are setting up a project based on the Open Industry Data (OID), @@ -67,24 +96,7 @@ cognite_modules: example_pump_asset_hierarchy: data_set: src:lift_pump_stations raw_db: pump_assets - - cdf_data_pipeline_asset_valhall: - location_name: oid - module_version: '1' - source_name: workmate - asset_dataset: ds_asset_oid - asset_raw_input_db: asset_oid_workmate - asset_raw_input_table: assets - asset_location_extractor_group_source_id: - asset_location_processing_group_source_id: - asset_location_read_group_source_id: - clientId: ${IDP_CLIENT_ID} - clientSecret: ${IDP_CLIENT_SECRET} - tokenUri: ${IDP_TOKEN_URL} - cdfProjectName: ${CDF_PROJECT} - scopes: ${IDP_SCOPES} - audience: ${IDP_AUDIENCE} - pause_transformations: true + experimental: # Only valid for this module, loads template variables from environment cdf_asset_source_model: From 081fbb6392dcd5742d694bd95acdba82ed2d2e75 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Wed, 13 Dec 2023 14:25:45 +0100 Subject: [PATCH 58/90] linting --- demo/preproc.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/demo/preproc.py b/demo/preproc.py index ff51296c4..52a49a200 100755 --- a/demo/preproc.py +++ b/demo/preproc.py @@ -1,5 +1,6 @@ #!/usr/bin/env python -import os, shutil +import os +import shutil from pathlib import Path import yaml @@ -8,8 +9,8 @@ DEMO_PROJECT = THIS_FOLDER.parent / "demo_project" TOOLKIT_FOLDER = THIS_FOLDER.parent / "cognite_toolkit" -def run() -> None: +def run() -> None: print(TOOLKIT_FOLDER) print("Running copy commands to prep deployment of demo...") @@ -25,7 +26,7 @@ def run() -> None: for key, value in variables.items(): print(f"updating {key}") config_yaml = config_yaml.replace(f"{key}: ", f"{key}: {value}") - + config_yaml_path.write_text(config_yaml) From a4686f729c117f6a76dc2cd1f35567c0f0da0746 Mon Sep 17 00:00:00 2001 From: Anders Albert <60234212+doctrino@users.noreply.github.com> Date: Wed, 13 Dec 2023 14:44:47 +0100 Subject: [PATCH 59/90] [CDF-20461] Join infield location transformations (#215) * refactor; Single transformation * refactor; deleted parent transformation * refactor: update test data * build: changelog * tests; updated test data * fix: issue in SQL * tests: regen test data --- CHANGELOG.templates.md | 7 ++- ...nc_asset_parents_from_hierarchy_to_apm.sql | 9 --- ...c_asset_parents_from_hierarchy_to_apm.yaml | 26 -------- ...ield_sync_assets_from_hierarchy_to_apm.sql | 28 ++++++++- ...eld_sync_assets_from_hierarchy_to_apm.yaml | 2 +- .../cdf_infield_location.yaml | 60 +++++-------------- .../cdf_infield_location.yaml | 1 - 7 files changed, 47 insertions(+), 86 deletions(-) delete mode 100644 cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.sql delete mode 100644 cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index 5e32b8482..544b948b3 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -28,8 +28,11 @@ Changes are grouped as follows: - Move cdf_apm_base into separate folder. - The file `local.yaml` has been renamed `environments.yaml` to better reflect its purpose. - Removed demo `sourceId` from `cdf_infield_location` module. 
-- Changed the isPaused flag to use a module-level variable instead of hardcoded in cdf_apm_simple_data_model.
-
+- Changed the isPaused flag to use a module-level variable instead of hardcoded in `cdf_apm_simple_data_model`.
+- Combined the child and parent transformations `sync_assets_from_hierarchy_to_apm` in `cdf_infield_location`.
+  This has the benefit of not having to wait for the parent transformation to finish before starting the child transformation,
+  as there is no longer a dependency between the two transformations.
+
 ### Fixed
 
 - Removed transformation identity provider variables from modules and reused the global cicd_ prefixed ones.
diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.sql b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.sql
deleted file mode 100644
index e1720e3db..000000000
--- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.sql
+++ /dev/null
@@ -1,9 +0,0 @@
-select
-  cast(asset.externalId as STRING) as externalId,
-  (case
-       when isnull(asset.parentExternalId) then null
-       else node_reference('sp_asset_{{default_location}}_source', asset.parentExternalId)
-   end) as parent
-from
-  cdf_assetSubtree('{{root_asset_external_id}}') as asset
-  inner join cdf_assetSubtree('{{root_asset_external_id}}') as rootAsset on asset.rootId = rootAsset.id
\ No newline at end of file
diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml
deleted file mode 100644
index 5e90cbcae..000000000
--- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-externalId: tr_asset_{{default_location}}_{{source_asset}}_infield_sync_asset_parents_from_hierarchy_to_apm
-name: asset:{{default_location}}:{{source_asset}}:infield:sync_asset_parents_from_hierarchy_to_apm
-destination:
-  view:
-    space: cdf_core
-    externalId: Asset
-    version: 'v1'
-  instanceSpace: 'sp_asset_{{default_location}}_source'
-  type: nodes
-ignoreNullFields: true
-shared: true
-action: upsert
-# Specify credentials separately like this:
-# You can also use different credentials for the running transformations than the ones you use to deploy
-authentication:
-  clientId: {{cicd_clientId}}
-  clientSecret: {{cicd_clientSecret}}
-  tokenUri: {{cicd_tokenUri}}
-  # Optional: If idP requires providing the scopes
-  cdfProjectName: {{cdfProjectName}}
-  scopes: {{cicd_scopes}}
-  # Optional: If idP requires providing the audience
-  audience: {{cicd_audience}}
-schedule:
-  interval: '{{scheduleHourly}}'
-isPaused: false
diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.sql b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.sql
index 0782d4060..97614cbb8 100644
--- 
a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.sql +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.sql @@ -1,5 +1,28 @@ +--- Create All Assets without properties +--- This is necessary so we can populate the direct relations +--- in the next select statement select - cast(asset.externalId as STRING) as externalId, + cast(asset.externalId as STRING) as externalId, + --- We skip the remaining properties, + --- but need to have these columns set so we can do a UNION ALL operation with the statement below. + null as parent, + null as source, + null as root, + null as description, + null as title, + null as sourceId +from + cdf_assetSubtree('{{root_asset_external_id}}') as asset + +UNION ALL + +--- Create All Assets with properties including direct relations +select + cast(asset.externalId as STRING) as externalId, + (case + when isnull(asset.parentExternalId) then null + else node_reference('sp_asset_{{default_location}}_source', asset.parentExternalId) + end) as parent, cast("Asset Hierarachy" as STRING) as source, node_reference('sp_asset_{{default_location}}_source', cast(rootAsset.externalId as STRING)) as root, cast(asset.description as STRING) as description, @@ -7,4 +30,5 @@ select cast(asset.externalId as STRING) as sourceId from cdf_assetSubtree('{{root_asset_external_id}}') as asset - inner join cdf_assetSubtree('{{root_asset_external_id}}') as rootAsset on asset.rootId = rootAsset.id \ No newline at end of file + -- Get root asset + inner join cdf_assetSubtree('{{root_asset_external_id}}') as rootAsset on asset.rootId = rootAsset.id diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml index 2a6e1308b..411e651da 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml @@ -23,4 +23,4 @@ authentication: audience: {{cicd_audience}} schedule: interval: '{{scheduleHourly}}' - isPaused: false \ No newline at end of file + isPaused: false diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml index 6ee5e8153..1ea6ae726 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml +++ b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml @@ -419,41 +419,6 @@ Space: name: sp:infield:oid:source space: sp_asset_oid_source Transformation: -- destination: - instanceSpace: sp_asset_oid_source - type: nodes - view: - externalId: Asset - space: cdf_core - version: v1 - destinationOidcCredentials: - audience: ${IDP_AUDIENCE} - cdfProjectName: ${CDF_PROJECT} - clientId: ${IDP_CLIENT_ID} - clientSecret: ${IDP_CLIENT_SECRET} - scopes: ${IDP_SCOPES} - tokenUri: ${IDP_TOKEN_URL} - externalId: tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm - ignoreNullFields: true - isPublic: true - name: asset:oid:workmate:infield:sync_asset_parents_from_hierarchy_to_apm - ownerIsCurrentUser: 
true - query: "select\n cast(asset.externalId as STRING) as externalId,\n (case\n \ - \ when isnull(asset.parentExternalId) then null\n else node_reference('sp_asset_oid_source',\ - \ asset.parentExternalId) \n end) as parent\nfrom\n cdf_assetSubtree('WMT:VAL')\ - \ as asset\n inner join cdf_assetSubtree('WMT:VAL') as rootAsset on asset.rootId\ - \ = rootAsset.id" - schedule: - externalId: tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm - interval: 7 * * * * - isPaused: false - sourceOidcCredentials: - audience: ${IDP_AUDIENCE} - cdfProjectName: ${CDF_PROJECT} - clientId: ${IDP_CLIENT_ID} - clientSecret: ${IDP_CLIENT_SECRET} - scopes: ${IDP_SCOPES} - tokenUri: ${IDP_TOKEN_URL} - destination: instanceSpace: sp_asset_oid_source type: nodes @@ -473,12 +438,21 @@ Transformation: isPublic: true name: asset:oid:workmate:infield:sync_assets_from_hierarchy_to_apm ownerIsCurrentUser: true - query: "select\n cast(asset.externalId as STRING) as externalId, \n cast(\"Asset\ - \ Hierarachy\" as STRING) as source,\n node_reference('sp_asset_oid_source',\ - \ cast(rootAsset.externalId as STRING)) as root,\n cast(asset.description as\ - \ STRING) as description,\n cast(asset.name as STRING) as title,\n cast(asset.externalId\ - \ as STRING) as sourceId\nfrom\n cdf_assetSubtree('WMT:VAL') as asset\n inner\ - \ join cdf_assetSubtree('WMT:VAL') as rootAsset on asset.rootId = rootAsset.id" + query: "--- Create All Assets without properties\n--- This is necessary so we can\ + \ populate the direct relations\n--- in the next select statement\nselect\n cast(asset.externalId\ + \ as STRING) as externalId,\n --- We skip the remaining properties,\n --- but\ + \ need to have these columns set so we can do a UNION ALL operation with the statement\ + \ below.\n null as parent,\n null as source,\n null as root,\n null as description,\n\ + \ null as title,\n null as sourceId\nfrom\n cdf_assetSubtree('WMT:VAL') as\ + \ asset\n\nUNION ALL\n\n--- Create All Assets with properties including direct\ + \ relations\nselect\n cast(asset.externalId as STRING) as externalId,\n (case\n\ + \ when isnull(asset.parentExternalId) then null\n else node_reference('sp_asset_oid_source',\ + \ asset.parentExternalId)\n end) as parent,\n cast(\"Asset Hierarachy\" as STRING)\ + \ as source,\n node_reference('sp_asset_oid_source', cast(rootAsset.externalId\ + \ as STRING)) as root,\n cast(asset.description as STRING) as description,\n\ + \ cast(asset.name as STRING) as title,\n cast(asset.externalId as STRING) as\ + \ sourceId\nfrom\n cdf_assetSubtree('WMT:VAL') as asset\n -- Get root asset\n\ + \ inner join cdf_assetSubtree('WMT:VAL') as rootAsset on asset.rootId = rootAsset.id\n" schedule: externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm interval: 7 * * * * @@ -530,9 +504,6 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: -- externalId: tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm - interval: 7 * * * * - isPaused: false - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm interval: 7 * * * * isPaused: false @@ -541,6 +512,5 @@ TransformationSchedule: isPaused: false deleted: Transformation: - - externalId: tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm - externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities diff --git 
a/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml
index 34e165331..fd4c139c1 100644
--- a/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml
+++ b/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml
@@ -7,6 +7,5 @@ deleted:
       - sp_asset_oid_source
       - sp_infield_oid_app_data
   Transformation:
-  - externalId: tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm
   - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm
   - externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities

From 688bf3960168a1e07ad505ba0eb000ee0c029659 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?=
Date: Wed, 13 Dec 2023 14:47:13 +0100
Subject: [PATCH 60/90] simplified a bit

---
 demo/demo_config.yaml | 12 ------------
 demo/preproc.py       | 16 +---------------
 2 files changed, 1 insertion(+), 27 deletions(-)
 delete mode 100644 demo/demo_config.yaml

diff --git a/demo/demo_config.yaml b/demo/demo_config.yaml
deleted file mode 100644
index dc112cbb4..000000000
--- a/demo/demo_config.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-readwrite_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
-readonly_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e
-applicationsconfiguration_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e
-infield_default_location_checklist_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
-infield_default_location_normal_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e
-infield_default_location_template_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
-infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e
-
-#cdf_data_pipeline_asset_valhall:
-asset_location_extractor_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
-asset_location_processing_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
-asset_location_read_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
\ No newline at end of file
diff --git a/demo/preproc.py b/demo/preproc.py
index ff51296c4..28056da15 100755
--- a/demo/preproc.py
+++ b/demo/preproc.py
@@ -3,31 +3,17 @@
 import shutil
 from pathlib import Path
 
-import yaml
-
 THIS_FOLDER = Path(__file__).parent.absolute()
 DEMO_PROJECT = THIS_FOLDER.parent / "demo_project"
-TOOLKIT_FOLDER = THIS_FOLDER.parent / "cognite_toolkit"
 
 
 def run() -> None:
-    print(TOOLKIT_FOLDER)
-
     print("Running copy commands to prep deployment of demo...")
     os.makedirs(DEMO_PROJECT, exist_ok=True)
     print("Copying my enviroments.yaml to root of repo...")
     shutil.copy(THIS_FOLDER / "environments.yaml", DEMO_PROJECT / "environments.yaml")
     print("Copying config.yaml into demo project...")
-    shutil.copy(TOOLKIT_FOLDER / "config.yaml", DEMO_PROJECT / "config.yaml")
-    config_yaml_path = DEMO_PROJECT / "config.yaml"
-
-    variables = yaml.safe_load((THIS_FOLDER / "demo_config.yaml").read_text())
-    config_yaml = config_yaml_path.read_text()
-    for key, value in variables.items():
-        print(f"updating {key}")
-        config_yaml = config_yaml.replace(f"{key}: ", f"{key}: {value}")
-
-    config_yaml_path.write_text(config_yaml)
+    shutil.copy(THIS_FOLDER / "config.yaml", DEMO_PROJECT / "config.yaml")

From cbe1abdb0f57e4ee98ffc798b9459f64476b5ea1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3%B8nning?=
Date: Wed, 13 Dec 2023 14:54:19 +0100
Subject: [PATCH 61/90] unignore demo config

---
 .gitignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitignore 
b/.gitignore
index 68120f5ae..ba5834787 100644
--- a/.gitignore
+++ b/.gitignore
@@ -278,5 +278,6 @@ new_project/
 # If you need to update the cognite_toolkit template files for local.yaml and config.yaml, comment below
 local.yaml
 config.yaml
+!demo/config.yaml
 demo_project/
 tests/pytest-project/

From 27438bf914f7b2e17e1fd2baadee7ff5010b3496 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3%B8nning?=
Date: Wed, 13 Dec 2023 14:55:21 +0100
Subject: [PATCH 62/90] re-adding demo/config.yaml

---
 demo/config.yaml | 138 +++++++++++++++++++++++++++++++++++++++++++++
 demo/preproc.py  |   2 +-
 2 files changed, 139 insertions(+), 1 deletion(-)
 create mode 100644 demo/config.yaml

diff --git a/demo/config.yaml b/demo/config.yaml
new file mode 100644
index 000000000..e2db5392a
--- /dev/null
+++ b/demo/config.yaml
@@ -0,0 +1,138 @@
+# Global variables are available to all submodules
+# of the cognite_modules.
+cognite_modules:
+  cdfProjectName: ${CDF_PROJECT}
+  cdf_cluster: ${CDF_CLUSTER}
+  # Optional: If idP requires providing the audience
+  cicd_audience: ${IDP_AUDIENCE}
+  cicd_clientId: ${IDP_CLIENT_ID}
+  cicd_clientSecret: ${IDP_CLIENT_SECRET}
+  # Optional: If idP requires providing the scopes
+  cicd_scopes:
+    - ${IDP_SCOPES}
+  cicd_tokenUri: ${IDP_TOKEN_URL}
+  # Daily at 1:35 AM
+  scheduleDaily: 35 1 * * *
+  # Seven minutes past each hour
+  scheduleHourly: 7 * * * *
+  # Every fifteen minutes
+  scheduleQuarterly: 0/15 * * * *
+  common:
+    cdf_auth_readwrite_all:
+      readonly_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e
+      readwrite_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f
+
+  core:
+    cdf_apm_base:
+      apm_datamodel_space: APM_SourceData
+      apm_datamodel_version: '1'
+
+  examples:
+    # Values here are only valid for this module.
+    # The raw database values here point to the RAW data loaded in the cdf_oid_example_data
+    # module. If you have changed the default values in that module, you need to change them here as well.
+    cdf_apm_simple_data_model:
+      datamodel: apm_simple
+      datamodel_version: '1'
+      default_location: oid
+      pause_transformations: true
+      source_asset: workmate
+      source_timeseries: pi
+      source_workorder: workmate
+      space: apm_simple
+      view_Asset_version: '1'
+      view_WorkItem_version: '1'
+      view_WorkOrder_version: '1'
+
+    # Only valid for this module, loads template variables from environment
+    #
+    # In the example below we are setting up a project based on the Open Industry Data (OID),
+    # that originates from the Valhall oil rig. Note that the location/site is NOT used
+    # to structure the data when on-boarding. The expectation is that a single source system
+    # and its data pipeline may supply data for multiple locations/sites.
+    # The structuring of the data based on site/location should happen as part of processing
+    # the data in CDF, i.e. contextualisation.
+    #
+    # Each of the data resource types has an assigned source system where the data originates from.
+    # This information will be used to construct RAW database names, and to create data sets in CDF,
+    # and can be used to control access.
+ cdf_oid_example_data: + default_location: oid + source_asset: workmate + source_files: fileshare + source_timeseries: pi + source_workorder: workmate + + # Only valid for this module, loads template variables from environment + example_pump_asset_hierarchy: + data_set: src:lift_pump_stations + raw_db: pump_assets + + cdf_data_pipeline_asset_valhall: + location_name: oid + module_version: '1' + source_name: workmate + asset_dataset: ds_asset_oid + asset_raw_input_db: asset_oid_workmate + asset_raw_input_table: assets + asset_location_extractor_group_source_id: + asset_location_processing_group_source_id: + asset_location_read_group_source_id: + clientId: ${IDP_CLIENT_ID} + clientSecret: ${IDP_CLIENT_SECRET} + tokenUri: ${IDP_TOKEN_URL} + cdfProjectName: ${CDF_PROJECT} + scopes: ${IDP_SCOPES} + audience: ${IDP_AUDIENCE} + pause_transformations: true + experimental: + # Only valid for this module, loads template variables from environment + cdf_asset_source_model: + data_model_version: '1' + instance_space: cdfTemplateInstances + model_space: ExtendedSourceDataModels + root_asset_external_id: lift_pump_stations:root + view_asset_version: '1' + + # Only valid for this module, loads template variables from environment + example_pump_data_model: + data_model: PumpLiftStations + data_model_version: '1' + instance_space: pumpInstanceSpace + model_space: pumpModelSpace + source_model: ExtendedSourceData + source_model_space: ExtendedSourceDataModels + view_LiftStation_version: '1' + view_Pump_version: '1' + + infield: + cdf_infield_common: + applicationsconfiguration_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e + + # This default_location points to the location created by the cdf_oid_example_data module. + # When you create your own location by copying the cdf_oid_example_data module to + # set up data sets and groups, the below needs to refer to the location to define. + # + cdf_infield_location: + apm_app_config_external_id: default-infield-config-minimal + apm_config_instance_space: APM_Config + apm_datamodel_space: APM_SourceData + default_location: oid + # infield and must be updated for each location + infield_default_location_checklist_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f + infield_default_location_normal_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e + infield_default_location_template_admin_users_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f + infield_default_location_viewer_users_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e + module_version: '1' + # the root asset for this location, needs to be updated for each location + root_asset_external_id: WMT:VAL + # changed if you want to load workorders from another RAW database. 
+ source_asset: workmate + source_workorder: workmate + workorder_raw_db: workorder_oid_workmate + # The table name in the raw_db database that has workorder data + workorder_table_name: workorders + +custom_modules: + my_example_module: + example_variable: demo_dataset # This is the variable that will be used in the module diff --git a/demo/preproc.py b/demo/preproc.py index 28056da15..8e112ce35 100755 --- a/demo/preproc.py +++ b/demo/preproc.py @@ -10,7 +10,7 @@ def run() -> None: print("Running copy commands to prep deployment of demo...") os.makedirs(DEMO_PROJECT, exist_ok=True) - print("Copying my enviroments.yaml to root of repo...") + print("Copying my environments.yaml to root of repo...") shutil.copy(THIS_FOLDER / "environments.yaml", DEMO_PROJECT / "environments.yaml") print("Copying config.yaml into demo project...") shutil.copy(THIS_FOLDER / "config.yaml", DEMO_PROJECT / "config.yaml") From d150d31858bf4f99ff18c5fdb7cc1eb220e3040d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Wed, 13 Dec 2023 14:59:13 +0100 Subject: [PATCH 63/90] re-adding demo/config.yaml --- demo/config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/demo/config.yaml b/demo/config.yaml index e2db5392a..66fa78ab3 100644 --- a/demo/config.yaml +++ b/demo/config.yaml @@ -75,9 +75,9 @@ cognite_modules: asset_dataset: ds_asset_oid asset_raw_input_db: asset_oid_workmate asset_raw_input_table: assets - asset_location_extractor_group_source_id: - asset_location_processing_group_source_id: - asset_location_read_group_source_id: + asset_location_extractor_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f + asset_location_processing_group_source_id: 684237e9-c1fd-4d3c-8c50-de9ea30ac16f + asset_location_read_group_source_id: 7bdcb20c-3e6a-400c-b9ef-cf835f64f05e clientId: ${IDP_CLIENT_ID} clientSecret: ${IDP_CLIENT_SECRET} tokenUri: ${IDP_TOKEN_URL} From a927e76077f3aa300c81e151211b4464e563858e Mon Sep 17 00:00:00 2001 From: Jan Inge Bergseth <31886431+BergsethCognite@users.noreply.github.com> Date: Wed, 13 Dec 2023 15:07:58 +0100 Subject: [PATCH 64/90] Update source_asset_valhall_workmate.config.yaml added dataSetExternalId to extraction pipeline config --- .../source_asset_valhall_workmate.config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml index 0c71b4189..9a5570b04 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/extraction_pipelines/source_asset_valhall_workmate.config.yaml @@ -1,4 +1,5 @@ externalId: 'ep_src_asset_{{location_name}}_{{source_name}}' +dataSetExternalId: 'ds_asset_{{location_name}}' description: 'DB extractor config reading data from {{location_name}}:{{source_name}}' config: logger: From 17567078432f19a399ddd35890f495431a73d544 Mon Sep 17 00:00:00 2001 From: Jan Inge Bergseth <31886431+BergsethCognite@users.noreply.github.com> Date: Wed, 13 Dec 2023 15:09:27 +0100 Subject: [PATCH 65/90] Update tr_asset_oid_workmate_asset_hierarchy.yaml added dataSetExternalId --- .../transformations/tr_asset_oid_workmate_asset_hierarchy.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff 
--git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml index 38c321801..f2119d09b 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml @@ -1,4 +1,5 @@ externalId: 'tr_asset_{{location_name}}_{{source_name}}_asset_hierarchy' +dataSetExternalId: 'ds_asset_{{location_name}}' name: 'asset:{{location_name}}:{{source_name}}:asset_hierarchy' destination: type: "asset_hierarchy" @@ -19,4 +20,4 @@ authentication: schedule: # every hour interval: '{{scheduleHourly}}' - isPaused: {{pause_transformations}} \ No newline at end of file + isPaused: {{pause_transformations}} From 8977f502cea627576e223ef28a0ac690c8da4e16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Thu, 14 Dec 2023 13:42:40 +0100 Subject: [PATCH 66/90] Missing extraction pipeline does not raise exception --- cognite_toolkit/cdf_tk/utils.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/cognite_toolkit/cdf_tk/utils.py b/cognite_toolkit/cdf_tk/utils.py index f7024f3c1..7306f812f 100644 --- a/cognite_toolkit/cdf_tk/utils.py +++ b/cognite_toolkit/cdf_tk/utils.py @@ -347,9 +347,11 @@ def verify_extraction_pipeline(self, external_id: str) -> int: if pipeline is not None: return pipeline.id - raise ValueError( - f"Extraction pipeline {external_id} does not exist, you need to create it first. Do this by adding a config file to the extraction_pipelines folder." - ) + else: + print( + f" [bold yellow]WARNING[/] Extraction pipeline {external_id} does not exist. It may have been deleted, or not been part of the module." 
+ ) + return -1 def verify_spaces(self, space: str | list[str]) -> list[str]: """Verify that the configured space exists and is accessible From d01ed74f6d1554ac801236e60ef4185ad8e4bbee Mon Sep 17 00:00:00 2001 From: Greger Teigre Wedel Date: Thu, 14 Dec 2023 16:13:04 +0100 Subject: [PATCH 67/90] Update docs in preparation for beta (#211) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update contribution docs * Fix example workorder dates * Add wrapper for dev work on cdf-tk and fixup cdf.py * Added datapipeline/extraction pipeline to changelogs * Further cleanups in --help * Update READMEs for common and core modules * Fix hardcoded values in transformation * Update READMEs for examples * Update READMEs for experimental modules * Fix tests * Make twine error fail the action * Apply suggestions from code review Co-authored-by: Pål Rønning * Further fixes from review --------- Co-authored-by: Pål Rønning Co-authored-by: Pål Rønning --- .github/workflows/release.yaml | 2 +- .vscode/launch.json | 8 +- CHANGELOG.cdf-tk.md | 8 ++ CHANGELOG.templates.md | 2 + CONTRIBUTING.md | 66 ++++++----- cdf-tk-dev.py | 38 +++++++ cognite_toolkit/cdf.py | 104 ++++++++++-------- .../cognite_modules/common/README.md | 5 + .../common/cdf_auth_readwrite_all/README.md | 36 ++++-- .../cognite_modules/core/README.md | 6 + .../core/cdf_apm_base/README.md | 35 +++++- .../cognite_modules/examples/README.md | 11 +- .../cdf_apm_simple_data_model/README.md | 40 ++++++- .../examples/cdf_oid_example_data/README.md | 33 +++++- .../example_pump_asset_hierarchy/README.md | 36 +++++- ..._asset_hierarchy-load-collections_pump.sql | 8 +- .../cognite_modules/experimental/README.md | 7 +- .../cdf_asset_source_model/README.md | 36 +++++- .../example_pump_data_model/README.md | 40 ++++++- ...ield_sync_workorders_to_apm_activities.sql | 9 +- .../cdf_infield_location.yaml | 10 +- .../example_pump_asset_hierarchy.yaml | 2 +- 22 files changed, 414 insertions(+), 128 deletions(-) create mode 100755 cdf-tk-dev.py create mode 100644 cognite_toolkit/cognite_modules/common/README.md create mode 100644 cognite_toolkit/cognite_modules/core/README.md diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 92ca49445..44ef367fb 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -88,4 +88,4 @@ jobs: env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} - run: twine upload --verbose dist/* || echo 'Version exists' + run: twine upload --verbose dist/* diff --git a/.vscode/launch.json b/.vscode/launch.json index 9f3718403..517bd18e2 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -8,7 +8,7 @@ "name": "Python: build", "type": "python", "request": "launch", - "program": "./cognite_toolkit/cdf.py", + "program": "./cdf-tk-dev.py", "args": [ "--verbose", "--override-env", @@ -25,7 +25,7 @@ "name": "Python: deploy", "type": "python", "request": "launch", - "program": "./cognite_toolkit/cdf.py", + "program": "./cdf-tk-dev.py", "args": [ "deploy", "--dry-run", @@ -42,7 +42,7 @@ "name": "Python: clean", "type": "python", "request": "launch", - "program": "./cognite_toolkit/cdf.py", + "program": "./cdf-tk-dev.py", "args": [ "clean", //"--dry-run", @@ -56,7 +56,7 @@ "name": "Python: cdf.py", "type": "python", "request": "launch", - "program": "./cognite_toolkit/cdf.py", + "program": "./cdf-tk-dev.py", "args": [ "clean", "-r" diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md index 19c92d677..d5f2ea2fe 100644 --- 
a/CHANGELOG.cdf-tk.md +++ b/CHANGELOG.cdf-tk.md @@ -16,15 +16,22 @@ Changes are grouped as follows: - `Security` in case of vulnerabilities. ## [TBD] - 2023-12-TBD + ### Added + - Warnings if a configuration file is using `snake_case` when the resource type is expecting `camelCase`. - Added support for validation of `space` for data models. - Check for whether template variables `` are present in the config files. - Check for whether data set id is present in the config files. - Print table at the end of `cdf-tk deploy` with the resources that were created, deleted, and skipped. +- Support for Extraction Pipelines and Extraction Pipeline configuration for remotely configured Extractors. + ### Removed + - In the `deploy` command `drop_data` option has been removed. To drop data, use the `clean` command instead. + ### Changed + - Require all spaces to be explicitly defined as separate .space.yaml file. - The `data_set_id` for `Transformations` must now be set explicitly in the yaml config file for the `Transformation` under the `data_set_id` key. Note that you also need to explicitly define the `data_set` in its own yaml config file. @@ -34,6 +41,7 @@ Changes are grouped as follows: ### Fixed + - When running `cdf-tk deploy` with `--dry-run` a `ValueError` was raised if not all datasets were pre-existing. This is now fixed by skipping dataset validation when running with `--dry-run`. - When having a `auth` group with mixed capabilities of all scoped and resource scoped, the all scoped capabilities diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index 544b948b3..eac0f2b7d 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -1,4 +1,5 @@ # Changelog + All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), @@ -21,6 +22,7 @@ Changes are grouped as follows: - Explicitly define model `space` in `experimental/cdf_asset_source_model/` and `experimental/example_pump_model/`. - The module `my_example_module` has been added to the `custom_modules` folder. - Added globally defined schedule variables that can be used across all modules. +- A complete example of an Asset data pipeline in `examples/cdf_asset_data_pipeline/` shows how to configure an Extractor, monitor the status of the Extraction Pipeline, and load the data into the asset hierarchy using Transformations. ### Changed diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f924589d4..d1767af39 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -21,15 +21,20 @@ cdf_infield_location is an example of a team-owned module. Adding a new module consists of the following steps: -1. Determine where to put it (common, modules, or examples) -2. Create a new directory for the module with sub-directories per configuration type the module needs -3. Add a `default.config.yaml` file to the module root directory if you have variables in the templates -4. Add a `README.md` file to the module root directory with a description of the module and variables -5. Update `default.packages.yaml` with the new module if it is part of a package -6. Add a description of the module in the [module and package documentation](../docs/overview.md) -Each module should be as standalone as possible, but they can be dependent on either modules -in ./common or other modules in ./modules. If you need to deploy a data model as a foundational +1. Determine where to put it (core, common, modules, examples, or experimental). 
+2. Create a new directory for the module with sub-directories per configuration type the module needs. See the + [YAML reference documentation](https://developer.cognite.com/sdks/toolkit/references/configs). +3. Add a `default.config.yaml` file to the module root directory if you have variables in the templates. +4. Add a `README.md` file to the module root directory with a description of the module and variables. +5. Update `default.packages.yaml` in cognite_toolkit root with the new module if it is part of a package. +6. If this is an official module, add a description of the module in the + [module and package documentation](https://developer.cognite.com/sdks/toolkit/references/module_reference). + +> If you are not a Cognite employee and would like to contribute a module, please open an issue, so we can +> get in touch with you. + +Each module should be as standalone as possible, but they can be dependent on other modules. +If you need to deploy a data model as a foundational element for both transformations and applications to work, you may add a module with the data model. However, a better module would be one that includes all the elements needed to get data from the source system, through RAW (if necessary), into a source data model, and then transformed by one or @@ -37,8 +42,8 @@ more transformations into a domain data model. The solution data models can then that relies on the ingestion module. Please take care to think about the best grouping of modules to make it easy to deploy and maintain. -We are aiming at standardizing as much as possible, so we do not optimize for project-specific -changes and naming conventions except where we design for it. +We are aiming at standardizing as much as possible, so we do not optimize for customer-specific +changes and naming conventions except where we design to support it. > NOTE! Customer-specific projects should be able to use these templates directly, and also adopt > new changes from this repository as they are released. @@ -47,37 +52,30 @@ changes and naming conventions except where we design for it. ## Data formats -All the configurations should be kept in YAML and in a format that is compatible with the CDF API. -Use either camelCase or snake_case, mixing is not supported. -The configuration files are loaded directly into the Python SDK's support data classes for direct -use towards the CDF API. No client side schema validation should be done to ensure that you can immediately +All the configurations should be kept in camelCase YAML and in a format that is compatible with the CDF API. +The configuration files are loaded directly into the Python SDK's support data classes for +use towards the CDF API. Client side schema validation should be done in the Python SDK and not in `cdf-tk` +to ensure that you can immediately add a yaml configuration property without updating anything other than the version of the Python SDK. > NOTE!! As of now, any non-recognised properties will just be ignored by the Python SDK. If you don't -> get the desired configuration deployed, check your spelling and use of snake_case vs camelCase. The Python SDK -> expects camelCase. +> get the desired configuration deployed, check your spelling. -## Tooling and scripts/ directory +The scripts currently support many resources like raw, data models, time series, groups, and transformations. +They also have some support for loading of data that may be used as example data for CDF projects. 
However, +as a general rule, templates should contain governed configurations necessary to set up ingest, data pipelines, +and contextualisations, but not the actual data itself. -We want to add client-side logic/validation as part of the deployment process, e.g. validation -of data models, transformations, contextualizations, etc to ensure integrity and proper -functioning configuration. We may in the future introduce more SDK and CDF server-side -validation. - -> NOTE!! The scripts currently support raw, data models, time series, groups, and transformations. -> It also has some support for loading of data that may be used as example data for CDF projects. However, -> to the extent possible, this repository should not contain data, only governed configurations. -> Of course, where data population of e.g. data model is part of the configuration, that is fine. -> The scripts are continuosly under development to simplify management of configurations, and -> we are pushing the functionality into the Python SDK when that makes sense. +Of course, where data population of e.g. data model is part of the configuration, that is fine. +The scripts are continuously under development to simplify management of configurations, and +we are pushing the functionality into the Python SDK when that makes sense. ## Testing The `cdf_` prefixed modules should be tested as part of the product development. Our internal test framework for scenario based testing can be found in the Cognite private big-smoke repository. -> TODO Define how to make sure that modules get tested in big-smoke. - -The `cdf-tk deploy` script command will clean configurations before trying to load if you specify `--drop`, so you can -try to apply the configuration multiple times without having to clean up manually. There is also -a skeleton for a `cdf-tk clean` script command that will be used to clean up configurations using the scripts/delete.py functions. +The `cdf-tk deploy` script command will clean configurations if you specify `--drop`, so you can +try to apply the configuration multiple times without having to clean up manually. If you want to delete +everything that is governed by your templates, including data ingested into data models, the `cdf-tk clean` +script command can be used to clean up configurations using the scripts/delete.py functions. diff --git a/cdf-tk-dev.py b/cdf-tk-dev.py new file mode 100755 index 000000000..8cfe02a57 --- /dev/null +++ b/cdf-tk-dev.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +# You can use this file to run cdf-tk for development purposes in Visual Studio Code +# to avoid installing the cognite-toolkit package. +# cdf.py is found inside the cognite-toolkit package, which is fine when you do pip install cognite-toolkit +# However, when you run the file in Visual Studio Code, the module should not be installed in your +# python virtual environment, but rather be found in the root of the repo. 
+# This workaround allows you to run cdf.py in Visual Studio Code like this: +""" { + "name": "Python: build", + "type": "python", + "request": "launch", + "program": "./cdf-tk-dev.py", + "args": [ + "--verbose", + "--override-env", + "build", + "--build-dir=build", + "--clean", + "--env=local", + "./cognite_toolkit/" + ], + "console": "integratedTerminal", + "justMyCode": false + }, +""" + +import sys +from pathlib import Path + +root_folder = rf"{Path(Path(__file__).parent.absolute())}" + +sys.path.append(root_folder) + +from cognite_toolkit.cdf import app # noqa: E402 + +if __name__ == "__main__": + app() diff --git a/cognite_toolkit/cdf.py b/cognite_toolkit/cdf.py index 7a73692b6..634e7814a 100755 --- a/cognite_toolkit/cdf.py +++ b/cognite_toolkit/cdf.py @@ -68,31 +68,43 @@ def common( verbose: Annotated[ bool, typer.Option( - help="Turn on to get more verbose output", + help="Turn on to get more verbose output when running the commands", ), ] = False, override_env: Annotated[ bool, typer.Option( - help="Use .env file to override current environment variables", + help="Load the .env file in this or the parent directory, but also override currently set environment variables", ), ] = False, cluster: Annotated[ Optional[str], typer.Option( envvar="CDF_CLUSTER", - help="Cognite Data Fusion cluster to use", + help="The Cognite Data Fusion cluster to use. Can also be set with the CDF_CLUSTER environment variable.", ), ] = None, project: Annotated[ Optional[str], typer.Option( envvar="CDF_PROJECT", - help="Cognite Data Fusion project to use", + help="The Cognite Data Fusion project to use. Can also be set with the CDF_PROJECT environment variable.", ), ] = None, - version: bool = typer.Option(None, "--version", callback=_version_callback), + version: bool = typer.Option( + None, + "--version", + help="See which version of the toolkit and the templates are installed.", + callback=_version_callback, + ), ): + """The cdf-tk tool is used to build and deploy Cognite Data Fusion project configurations from the command line or through CI/CD pipelines. + + Each of the main commands has a separate help, e.g. `cdf-tk build --help` or `cdf-tk deploy --help`. 
+ + You can find the documentation at https://developer.cognite.com/sdks/toolkit/ + and the template reference documentation at https://developer.cognite.com/sdks/toolkit/references/configs + """ if ctx.invoked_subcommand is None: print( "[bold]A tool to manage and deploy Cognite Data Fusion project configurations from the command line or through CI/CD pipelines.[/]" @@ -103,7 +115,7 @@ def common( if override_env: print(" [bold yellow]WARNING:[/] Overriding environment variables with values from .env file...") if cluster is not None or project is not None: - print(" --cluster or --project are set and will override .env file values.") + print(" --cluster or --project is set and will override .env file values.") if not (Path.cwd() / ".env").is_file(): if not (Path.cwd().parent / ".env").is_file(): print("[bold yellow]WARNING:[/] No .env file found in current or parent directory.") @@ -159,8 +171,8 @@ def build( ), ] = False, ) -> None: - source_dir = Path(source_dir) """Build configuration files from the module templates to a local build directory.""" + source_dir = Path(source_dir) if not source_dir.is_dir(): print(f" [bold red]ERROR:[/] {source_dir} does not exist") exit(1) @@ -196,7 +208,7 @@ def deploy( build_dir: Annotated[ Optional[str], typer.Argument( - help="Where to find the module templates to deploy from", + help="Where to find the module templates to deploy from. Defaults to ./build directory.", allow_dash=True, ), ] = "./build", build_env: Annotated[ Optional[str], typer.Option( "--env", "-e", - help="Build environment to build for", + help="CDF project environment to build for. Defined in environments.yaml. Defaults to dev.", ), ] = "dev", interactive: Annotated[ bool, typer.Option( "--interactive", "-i", - help="Whether to use interactive mode when deciding which modules to deploy", + help="Whether to use interactive mode when deciding which modules to deploy.", ), ] = False, drop: Annotated[ bool, typer.Option( "--drop", "-d", - help="Whether to drop existing configurations, drop per resource if present", + help="Whether to drop existing configurations, drop per resource if present.", ), ] = False, dry_run: Annotated[ bool, typer.Option( "--dry-run", "-r", - help="Whether to do a dry-run, do dry-run if present", + help="Whether to do a dry-run, do dry-run if present.", ), ] = False, include: Annotated[ Optional[list[str]], typer.Option( "--include", "-i", - help=f"Specify which resources to deploy, available options: {_AVAILABLE_DATA_TYPES}", + help=f"Specify which resources to deploy, available options: {_AVAILABLE_DATA_TYPES}.", ), ] = None, ) -> None: - """Deploy one or more configuration types from the built configrations to a CDF environment of your choice (as set in local.yaml).""" + """Deploy one or more resource types from the built configurations to a CDF project environment of your choice (as set in environments.yaml).""" # Override cluster and project from the options/env variables if ctx.obj.mockToolGlobals is not None: ToolGlobals = ctx.obj.mockToolGlobals @@ -285,7 +297,7 @@ def deploy( results = DeployResults([], "deploy", dry_run=dry_run) if "auth" in include and (directory := (Path(build_dir) / "auth")).is_dir(): # First, we need to get all the generic access, so we can create the rest of the resources. 
- print("[bold]EVALUATING auth resources with ALL scope...[/]") + print("[bold]EVALUATING auth resources (groups) with ALL scope...[/]") result = deploy_or_clean_resources( AuthLoader.create_loader(ToolGlobals, target_scopes="all_scoped_skipped_validation"), directory, @@ -293,7 +305,7 @@ def deploy( ) results.append(result) if ToolGlobals.failed: - print("[bold red]ERROR: [/] Failure to deploy auth as expected.") + print("[bold red]ERROR: [/] Failure to deploy auth (groups) with ALL scope as expected.") exit(1) for LoaderCls in TopologicalSorter(selected_loaders).static_order(): result = deploy_or_clean_resources( @@ -319,7 +331,7 @@ def deploy( results.append(result) print(results.create_rich_table()) if ToolGlobals.failed: - print("[bold red]ERROR: [/] Failure to deploy auth as expected.") + print("[bold red]ERROR: [/] Failure to deploy auth (groups) scoped to resources as expected.") exit(1) @@ -329,7 +341,7 @@ def clean( build_dir: Annotated[ Optional[str], typer.Argument( - help="Where to find the module templates to clean from", + help="Where to find the module templates to clean from. Defaults to ./build directory.", allow_dash=True, ), ] = "./build", @@ -338,7 +350,7 @@ def clean( typer.Option( "--env", "-e", - help="Build environment to clean for", + help="CDF project environment to use for cleaning.", ), ] = "dev", interactive: Annotated[ @@ -346,7 +358,7 @@ def clean( typer.Option( "--interactive", "-i", - help="Whether to use interactive mode when deciding which modules to clean", + help="Whether to use interactive mode when deciding which resource types to clean.", ), ] = False, dry_run: Annotated[ @@ -362,11 +374,11 @@ def clean( typer.Option( "--include", "-i", - help=f"Specify which resources to deploy, supported types: {_AVAILABLE_DATA_TYPES}", + help=f"Specify which resource types to deploy, supported types: {_AVAILABLE_DATA_TYPES}", ), ] = None, ) -> None: - """Clean up a CDF environment as set in local.yaml based on the configuration files in the build directory.""" + """Clean up a CDF environment as set in environments.yaml restricted to the entities in the configuration files in the build directory.""" # Override cluster and project from the options/env variables if ctx.obj.mockToolGlobals is not None: ToolGlobals = ctx.obj.mockToolGlobals @@ -454,7 +466,7 @@ def auth_verify( typer.Option( "--dry-run", "-r", - help="Whether to do a dry-run, do dry-run if present", + help="Whether to do a dry-run, do dry-run if present.", ), ] = False, interactive: Annotated[ @@ -462,7 +474,7 @@ def auth_verify( typer.Option( "--interactive", "-i", - help="Will run the verification in interactive mode, prompting for input", + help="Will run the verification in interactive mode, prompting for input. Used to bootstrap a new project.", ), ] = False, group_file: Annotated[ @@ -470,7 +482,7 @@ def auth_verify( typer.Option( "--group-file", "-f", - help="Group yaml configuration file to use for group verification", + help="Path to group yaml configuration file to use for group verification. Defaults to readwrite.all.group.yaml from the cdf_auth_readwrite_all common module.", ), ] = f"/{COGNITE_MODULES}/common/cdf_auth_readwrite_all/auth/readwrite.all.group.yaml", update_group: Annotated[ @@ -478,7 +490,7 @@ def auth_verify( typer.Option( "--update-group", "-u", - help="Used to update an existing group with the configurations from the configuration file. 
Set to the group id to update or 1 to update the only available group", + help="Used to update an existing group with the configurations from the configuration file. Set to the group id to update or 1 to update the default write-all group (if the tool is only member of one group).", ), ] = 0, create_group: Annotated[ @@ -486,15 +498,18 @@ def auth_verify( typer.Option( "--create-group", "-c", - help="Used to create a new group with the configurations from the configuration file. Set to the source id that the new group should be configured with", + help="Used to create a new group with the configurations from the configuration file. Set to the source id that the new group should be configured with.", ), ] = None, ): - """When you have a CDF_TOKEN or a pair of CDF_CLIENT_ID and CDF_CLIENT_SECRET for a CDF project, - you can use this command to verify that the token has the correct access rights to the project. + """When you have the necessary information about your identity provider configuration, + you can use this command to configure the tool and verify that the token has the correct access rights to the project. It can also create a group with the correct access rights, defaulting to write-all group meant for an admin/CICD pipeline. + As a minimum, you need the CDF project name, the CDF cluster, an identity provider token URL, and a service account client ID + and client secret (or an OAuth2 token set in CDF_TOKEN environment variable). + Needed capabilites for bootstrapping: "projectsAcl": ["LIST", "READ"], "groupsAcl": ["LIST", "READ", "CREATE", "UPDATE", "DELETE"] @@ -534,7 +549,7 @@ def main_init( typer.Option( "--dry-run", "-r", - help="Whether to do a dry-run, do dry-run if present", + help="Whether to do a dry-run, do dry-run if present.", ), ] = False, upgrade: Annotated[ @@ -542,7 +557,7 @@ def main_init( typer.Option( "--upgrade", "-u", - help="Will upgrade templates in place without overwriting config.yaml files", + help="Will upgrade templates in place without overwriting existing config.yaml and other files.", ), ] = False, git: Annotated[ @@ -557,24 +572,24 @@ def main_init( Optional[bool], typer.Option( "--no-backup", - help="Will skip making a backup before upgrading", + help="Will skip making a backup before upgrading.", ), ] = False, clean: Annotated[ Optional[bool], typer.Option( "--clean", - help="Will delete the new_project directory before starting", + help="Will delete the new_project directory before starting.", ), ] = False, init_dir: Annotated[ Optional[str], typer.Argument( - help="Directory to initialize with templates", + help="Directory path to project to initialize or upgrade with templates.", ), ] = "new_project", ): - """Initialize a new CDF project with templates.""" + """Initialize or upgrade a new CDF project with templates.""" files_to_copy = [] dirs_to_copy = [] @@ -632,9 +647,10 @@ def main_init( zip_path, _ = urllib.request.urlretrieve(toolkit_github_url) with zipfile.ZipFile(zip_path, "r") as f: f.extractall(extract_dir) - except Exception as e: + except Exception: print( - f"Failed to download templates. Are you sure that the branch {git} exists in the repository?\n{e}" + f"Failed to download templates. 
Are you sure that the branch {git} exists in " + "the https://github.com/cognitedata/cdf-project-templates repository?" ) exit(1) template_dir = Path(extract_dir) / f"cdf-project-templates-{git}" / "cognite_toolkit" @@ -680,19 +696,19 @@ def main_init( shutil.rmtree(extract_dir) if not dry_run: if upgrade: - print(f"Project in {target_dir} was upgraded.") + print(f"Your project in {target_dir} was upgraded.") else: - print(f"New project created in {target_dir}.") + print(f"A new project was created in {target_dir}.") if upgrade: - print(" All default.config.yaml files in the modules have been upgraded.") - print(" Your config.yaml files may need to be updated to override new default variables.") + print(" All default variables from the modules have been upgraded.") + print(" Please check your config.yaml file for new default variables that may need to be changed.") config_filepath = target_dir / "config.yaml" if not dry_run: if clean or not config_filepath.exists(): config_str, _ = generate_config(target_dir) config_filepath.write_text(config_str) - print(f"Created config.yaml file in {target_dir}.") + print(f"Created your config.yaml file in {target_dir}.") else: current = config_filepath.read_text() config_str, difference = generate_config(target_dir, existing_config=current) @@ -703,7 +719,7 @@ def _process_include(include: Optional[list[str]], interactive: bool) -> list[str]: if include and (invalid_types := set(include).difference(_AVAILABLE_DATA_TYPES)): print( - f" [bold red]ERROR:[/] Invalid data types specified: {invalid_types}, available types: {_AVAILABLE_DATA_TYPES}" + f" [bold red]ERROR:[/] Invalid resource types specified: {invalid_types}, available types: {_AVAILABLE_DATA_TYPES}" ) exit(1) include = include or list(_AVAILABLE_DATA_TYPES) @@ -719,7 +735,7 @@ def _select_data_types(include: Sequence[str]) -> list[str]: mapping[i] = datatype print("\na) All") print("q) Quit") - answer = input("Select data types to include: ") + answer = input("Select resource types to include: ") if answer.casefold() == "a": return list(include) elif answer.casefold() == "q": diff --git a/cognite_toolkit/cognite_modules/common/README.md b/cognite_toolkit/cognite_modules/common/README.md new file mode 100644 index 000000000..53d1c544e --- /dev/null +++ b/cognite_toolkit/cognite_modules/common/README.md @@ -0,0 +1,5 @@ +# COMMON modules + +The common directory contains modules that define resources that have an impact across an entire CDF project. +Typically these are related to authentication and authorization, as well as other global configurations like +global data models etc. diff --git a/cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/README.md b/cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/README.md index 4b1d2165b..eaff9a65e 100644 --- a/cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/README.md +++ b/cognite_toolkit/cognite_modules/common/cdf_auth_readwrite_all/README.md @@ -1,12 +1,34 @@ # Module: cdf_auth_readwrite_all -This module is used to create: -1. a group with read write access to everything in a CDF project (for a CI/CD pipeline) -2. a group with read-only access (for viewing configurations from UI) -3. 
a default group for infield that currently is needed to see the UI config menu +This is a foundational module used by the `cdf-tk` tool as the default +auth module for read-write access to all CDF resources for the tool itself (admin or CI/CD pipeline), +as well as default read-only access for admin access in the UI. -This module can be used for production as is. +This structure is based on the concept of ONLY the tool having write access to the entities +that are controlled by the templates. Everybody else should either have no access or read-only access. -It currently uses the following global configuration variables: -demo_readwrite_source_id and demo_readonly_source_id. \ No newline at end of file +## Managed resources + +This module manages the following resources: + +1. a group with read-write access (`gp_cicd_all_read_write`) to everything in a CDF project (for `cdf-tk` as an admin tool or + through a CI/CD pipeline). +2. a group with read-only access `gp_cicd_all_read_only` (for viewing configurations from UI). + +## Variables + +The following variables are required and defined in this module: + +| Variable | Description | +|----------|-------------| +|readwrite_source_id| The source ID of the group that should be granted read-write access to all resources in the project. | +|readonly_source_id| The source ID of the group that should be granted read-only access to all resources in the project. | + +## Usage + +The `gp_cicd_all_read_write` group is used by default by the `cdf-tk auth verify` command to verify correct access to resources in +a project. The groups are by default part of several packages that are created by the `cdf-tk` tool. + +If you have different needs for the readwrite and readonly groups, you can copy this module into `custom_modules`, rename +it (remove the cdf_ prefix), and change which modules are deployed in your `environments.yaml` file. You can also +use the `cdf-tk auth verify --group-file=/path/to/group.yaml` command to switch out the default group file with your own. diff --git a/cognite_toolkit/cognite_modules/core/README.md b/cognite_toolkit/cognite_modules/core/README.md new file mode 100644 index 000000000..a18b86639 --- /dev/null +++ b/cognite_toolkit/cognite_modules/core/README.md @@ -0,0 +1,6 @@ +# CORE modules + +This directory contains modules that are fundamental to many other packages and modules. The difference between +core and common modules is that the common modules can typically be replaced by custom replacements that +are project specific. The core modules SHOULD NOT be customised and should be seen as a mandatory prerequisite for +other modules relying on them. diff --git a/cognite_toolkit/cognite_modules/core/cdf_apm_base/README.md b/cognite_toolkit/cognite_modules/core/cdf_apm_base/README.md index 5b91e9056..698412070 100644 --- a/cognite_toolkit/cognite_modules/core/cdf_apm_base/README.md +++ b/cognite_toolkit/cognite_modules/core/cdf_apm_base/README.md @@ -1,7 +1,34 @@ # cdf_apm_base -This module contains the data model configurations necessary for all Cognite Asset Performance -Management (APM) projects. The global data models pre-loaded in all CDF projects are -leveraged and extended by this module. +The `cdf_apm_base` module manages the basic set of data models needed for +all Asset Performance Management use cases, as well as Infield and Maintain. -It is a requirement for both Infield, Maintain, and APM projects. \ No newline at end of file +The current version of the module is targeted and validated for Infield v2. + +## Managed resources + +This module manages the following resources: + +1. A set of containers built on the system models that are used to store core entities in maintenance. +2. A set of views that offer access to these core entities. +3. 
A data model that can be used to access the views and organises the relationships between the entities. +4. A space where all these entities are created. + +## Variables + +The following variables are required and defined in this module: + +| Variable | Description | +|----------|-------------| +| apm_datamodel_space| The space where this data model should be created. | +| apm_datamodel_version| The version of the data model that should be created. | + +> DO NOT CHANGE these variables unless you know what you are doing! + +## Usage + +Other packages like cdf_infield use this module to create the basic data model before installing more +location and customer specific configurations on top. + +You can install this module for other maintenance and asset performance use cases by just including +the module name under `deploy:` in your `environments.yaml` file. diff --git a/cognite_toolkit/cognite_modules/examples/README.md b/cognite_toolkit/cognite_modules/examples/README.md index 3239dd803..c68435e76 100644 --- a/cognite_toolkit/cognite_modules/examples/README.md +++ b/cognite_toolkit/cognite_modules/examples/README.md @@ -1,9 +1,10 @@ -# Example modules +# EXAMPLE modules This directory contains modules that are meant as examples or starting points for your own modules. You -should copy and rename an example module into the modules directory (remove any `cdf_` prefixes) and make -your own modifications to it. +should copy and rename an example module into the `custom_modules` directory (remove any `cdf_` prefixes) and make +your own modifications. You should then update the `deploy:` section in your `environments.yaml` file to install +the module. Some of these modules also contain data that you can use to quickly get started without ingesting data -into CDF. The cdf_apm_simple is a good example of this. It contains a small data set from Open Industrial -Data, the Valhall platform. +into CDF. The cdf_apm_simple module is a good example of this. It contains a small data set from [Open Industrial +Data](https://learn.cognite.com/open-industrial-data), the Valhall platform. diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/README.md b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/README.md index 5459baeb3..0fbd91c52 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/README.md +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/README.md @@ -1,9 +1,43 @@ -# Module: cdf_apm_simple_data_model +# cdf_apm_simple_data_model -This module relies on cdf_oid_example_data being loaded to a RAW database. -The module creates a simple Asset Performance Management data model and +This module relies on cdf_oid_example_data being loaded. +The module creates a simple example Asset Performance Management data model and adds the necessary transformations to populate the model with data from the Open Industrial Data data in cdf_oid_example_data. +## Managed resources + +This module manages the following resources: + +1. A space (`apm_simple`) with a simple example Asset Performance Management data model, including Asset, WorkOrder, + and WorkItem views. +2. Transformations that populate the data model from the RAW tables loaded by the cdf_oid_example_data module. 
+ +## Variables + +The following variables are required and defined in this module: + +| Variable | Description | +|----------|-------------| +| default_location | Name of the location, default: oid (Open Industrial Data) | +| source_asset| The name of the source system where assets originate from, default: workmate| +| source_workorder| The name of the source system where the workorders originate from, default: workmate| +| source_timeseries| The name of the source system where the timeseries originate from, default: pi| +| datamodel | The name of the data model to create, default: apm_simple | +| space | The space where the data model entities should be created, default: apm_simple | +| datamodel_version | The version to use when creating this data model. If you do modifications, you can bump up the version. | +| view_Asset_version | The version to use on the Asset view. | +| view_WorkOrder_version| The version to use on the WorkOrder view. | +| view_WorkItem_version | The version to use on the WorkItem view. | +| pause_transformations | Whether transformations should be created as paused. | + +> Note! The `source_asset`, `source_workorder`, and `source_timeseries` variables need to match the corresponding +> variables in the cdf_oid_example_data module as this module uses the RAW tables from that module. + +## Usage + +This module is not meant for production purposes. It is meant as an example to illustrate how you can create +a data model and populate it with data from the Open Industrial Data data set (cdf_oid_example_data). + It can be used standalone, but the transformations will then not be able to run without modifications. diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/README.md b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/README.md index a63a5ee56..8cbee3b47 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/README.md +++ b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/README.md @@ -1,9 +1,9 @@ -# cdf_oid_example_data module +# cdf_oid_example_data ## About the module The module has a basic data set that can be used as example data for many other modules and -purposes. +purposes. It is used by other packages and modules like cdf_infield_location. ## About the data Below is a snippet from the Cognite Hub website describing the data: >By sharing this live stream of industrial data freely, Aker BP and Cognite hope to accelerate innovation within data-heavy fields. This includes predictive maintenance, condition > monitoring, and advanced visualization techniques, as well as other new, unexpected applications. Advancement in these areas will directly benefit Aker BP’s operations and will also >improve the health and outlook of the industrial ecosystem on the Norwegian Continental Shelf. + +## Managed resources + +This module manages the following resources: + +1. Datasets for each resource type, named based on the location name (default: oid). +2. A set of Process & Instrumentation diagrams to be uploaded as files. +3. Pre-structured data in RAW for assets, timeseries, workorders, and workitems that are suitable for creating relationships + between the different resources. These are structured into one database per source system. +4. A set of timeseries (but no datapoints) for the different assets. +5. A transformation that moves assets from the asset RAW table into the classic asset hierarchy. 
+ +## Variables + +The following variables are required and defined in this module: + +| Variable | Description | +|----------|-------------| +| default_location| The default location name to use for the data set. We use oid (Open Industrial Data) by default | +| source_asset| The name of the source system where assets originate from, default: workmate| +| source_workorder| The name of the source system where the workorders originate from, default: workmate| +| source_files| The name of the source system where the files originate from, default: fileshare| +| source_timeseries| The name of the source system where the timeseries originate from, default: pi| + +## Usage + +If you want to create a project with example data, you can either specify this module in your `environments.yaml` file or +you can copy it to `custom_modules`, change the name (remove the cdf_ prefix), and replace the data with your own in the +various sub-directories. diff --git a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/README.md b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/README.md index 043607974..f78d92d6c 100644 --- a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/README.md +++ b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/README.md @@ -1,13 +1,37 @@ -# Module example_pump_assets -This module contains an example of source data for pumps and their associated assets with the transformations -into CDF Assets. +# example_pump_asset_hierarchy + +This module contains an example of source data for pumps and their associated assets with a transformation that moves the data +into the CDF classic asset hierarchy. + +## About the data + +The dataset is from [Collections Pump](https://data.bendoregon.gov/maps/collections-pump). From the source: -``` + +```txt The purpose of this dataset is to show the line configurations associated with the pumps operating at the lift stations around the City's Collection system. ``` -This is the basis for the practical guide to data modeling in CDF found in the [Cognite Documentation](https://pr-1865.docs.preview.cogniteapp.com/cdf/dm/dm_guides/dm_create_asset_hierarchy). +This is the basis for the practical guide to data modeling in CDF found in the [Cognite Documentation](https://docs.cognite.com/cdf/dm/dm_guides/dm_create_asset_hierarchy). + +## Managed resources + +This module manages the following resources: + +* A dataset to use when ingesting data into the asset hierarchy. +* A RAW table with the pump data. +* A transformation that moves the pump data into the asset hierarchy. + +## Variables + +The following variables are required and defined in this module: + +| Variable | Description | +|----------|-------------| +| raw_db | The name of the RAW database to use, default: `pump_assets` | +| data_set | The name of the dataset to use when ingesting into the asset hierarchy, default: `src:lift_pump_stations` | + +## Usage + +This module can be used standalone, but it is meant as example data to be used with the `example_pump_data_model` module. 
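To make the two variables above concrete, here is a minimal, hedged sketch of how they would typically be overridden in a project config.yaml. The nesting mirrors the demo/config.yaml added earlier in this patch series, and the values shown are simply the module defaults; treat this as an illustration rather than a required configuration.

```yaml
# Illustrative config.yaml excerpt; nesting mirrors the demo/config.yaml shown above.
cognite_modules:
  examples:
    example_pump_asset_hierarchy:
      # RAW database that holds the collections_pump table read by the transformation
      raw_db: pump_assets
      # Dataset applied to the assets created in the classic hierarchy
      data_set: src:lift_pump_stations
```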
diff --git a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.sql b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.sql index ac1c7d7d5..fc8553559 100644 --- a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.sql +++ b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.sql @@ -1,7 +1,7 @@ --- 1. asset root (defining all columns) SELECT "Lift Pump Stations" AS name, - dataset_id("src:lift_pump_stations") AS dataSetId, + dataset_id("{{data_set}}") AS dataSetId, "lift_pump_stations:root" AS externalId, '' as parentExternalId, "An example pump dataset" as description, @@ -11,7 +11,7 @@ UNION ALL --- 2. Lift Stations select s.lift_station as name, - dataset_id("src:lift_pump_stations") AS dataSetId, + dataset_id("{{data_set}}") AS dataSetId, concat("lift_station:", lower(replace(s.lift_station, ' ', '_'))) as externalId, 'lift_pump_stations:root' as parentExternalId, null as description, @@ -27,7 +27,7 @@ UNION ALL --- 3. Pumps SELECT concat("Pump ", PumpModel) as name, - dataset_id("src:lift_pump_stations") AS dataSetId, + dataset_id("{{data_set}}") AS dataSetId, GlobalID as externalId, concat("lift_station:", lower(replace(LiftStationID, ' ', '_'))) as parentExternalId, Comments as description, @@ -56,4 +56,4 @@ SELECT LifeCycleStatus, LocationDescription ) as metadata -from pump_assets.`collections_pump` +from `{{raw_db}}`.`collections_pump` diff --git a/cognite_toolkit/cognite_modules/experimental/README.md b/cognite_toolkit/cognite_modules/experimental/README.md index 688666428..9cc660a58 100644 --- a/cognite_toolkit/cognite_modules/experimental/README.md +++ b/cognite_toolkit/cognite_modules/experimental/README.md @@ -1,4 +1,5 @@ -# Experimental module folder -This folder contains experimental modules. These modules are not supported by Cognite and are not guaranteed to work. +# EXPERIMENTAL modules -They are used to test new features and ideas. They may be removed at any time. +This folder contains experimental modules. These modules are not supported by Cognite. + +They are used to test new features and ideas. They may be removed at any time or promoted to a stable module. diff --git a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/README.md b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/README.md index 946881e2c..43de8e0e2 100644 --- a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/README.md +++ b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/README.md @@ -1,5 +1,39 @@ -# Module cdf_asset_source_model +# cdf_asset_source_model This module contains an Asset source model that illustrates how to create a customer-extended Asset that is correctly tied to the global Asset source model. Follow this pattern to ensure that you can easily upgrade to new versions of the Asset source model. + +## Managed resources + +This module manages the following resources: + +* A space to hold instances ingested from the asset hierarchy. +* A space to hold the model entities for the extended asset. +* A container for extended asset properties. +* A view for the extended asset. +* A data model for the extended asset. +* A transformation that populates the data from the asset hierarchy into the model. 
+ +## Variables + +The following variables are required and defined in this module: + +| Variable | Description | +|----------|-------------| +| model_space | The space to create the extended asset model in. | +| instance_space | The space where instances should be created. | +| view_asset_version | The version to use on the extended asset view. | +| data_model_version | The version of the data model for the extended asset. | +| root_asset_external_id| The external id of the root asset in the asset hierarchy. | + +## Usage + +The `example_pump_asset_hierarchy` module contains example data that can be used with this module. +That module loads data from RAW into the classic asset hierarchy. + +This module creates an extended asset data model and the transformation populates that model with +data from the asset hierarchy. + +Next, you may want to look at the example_pump_data_model module that shows how to extend the +data model even further to sub-types of the extended asset. diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/README.md b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/README.md index e00b8da3a..b1642403b 100644 --- a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/README.md +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/README.md @@ -1,4 +1,40 @@ -# Module example_pump_data_model +# example_pump_data_model -This module contains the data model and transformation for a Lift stations with pumps. +This module contains the data model and transformation for Lift stations with pumps. +It shows how to extend the system data models for assets to represent custom +properties for lift stations and pumps, as well as how to transform data from a generic +asset space/model. +## Managed resources + +This module manages the following resources: + +* Two spaces for both the model and instances. +* A container for pump asset properties. +* Two views for pump and liftstation. +* A data model for the pump and liftstation. + +## Variables + +The following variables are required and defined in this module: + +| Variable | Description | +|----------|-------------| +| model_space | Space for the data models. | +| instance_space | Space to store instances. | +| source_model_space | Space to find source assets to ingest from. | +| source_model | Which data model to use to ingest asset data from. | +| view_Pump_version | Version to use on the pump view. | +| view_LiftStation_version | Version to use on the liftstation view. | +| data_model_version | Version to use on the LiftStation data model. | +| data_model | The name to use for the LiftStation data model. | + +## Usage + +The `example_pump_asset_hierarchy` module contains example data that can be used with this module. + +The `cdf_asset_source_model` model shows how to extend the system data model for assets to represent custom properties, +as well as how to transform data from the classic asset hierarchy into the extended asset model. + +Finally, this module shows how to extend the data model even further to sub-types of the extended asset and how to +categorise the assets from the `source_model` found in the `source_model_space`. 
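To show how the three pump example modules fit together, below is a hedged sketch of an environments.yaml entry that deploys them in dependency order. Only the `deploy:` list is described by the READMEs in this series; the environment name `dev` and the `project`/`type` keys are assumptions about the file's schema, not something this patch set confirms.

```yaml
# Hypothetical environments.yaml entry; the dev name and project/type keys are assumptions.
dev:
  project: my-dev-project  # assumed: the CDF project to deploy into
  type: dev                # assumed: the environment type
  deploy:
    # Load the example pump data first, then the extended source model,
    # then the pump data model that builds on both.
    - example_pump_asset_hierarchy
    - cdf_asset_source_model
    - example_pump_data_model
```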
diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.sql b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.sql
index 6487df21a..f936d2d97 100644
--- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.sql
+++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.sql
@@ -4,9 +4,12 @@
   cast(`key` as STRING) as id,
   cast(`status` as STRING) as status,
 /* cast(`startTime` as TIMESTAMP) as startTime,
-  cast(`endTime` as TIMESTAMP) as endTime,*/
-  cast('2023-12-01T09:00:00' as TIMESTAMP) as startTime,
-  cast('2023-12-06T09:00:00' as TIMESTAMP) as endTime,
+  cast(`endTime` as TIMESTAMP) as endTime,
+  NOTE!!! The two dates below just update all workorders to be from now
+  and into the future. This is done for the sake of the demo data.
+  */
+  cast(current_date() as TIMESTAMP) as startTime,
+  cast(date_add(current_date(), 7) as TIMESTAMP) as endTime,
   cast(`title` as STRING) as title,
   '{{root_asset_external_id}}' as rootLocation,
   'workmate' as source
diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml
index 1ea6ae726..c969f3740 100644
--- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml
+++ b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml
@@ -488,10 +488,12 @@ Transformation:
   query: " select\n  cast(`externalId` as STRING) as externalId,\n  cast(`description`\
     \ as STRING) as description,\n  cast(`key` as STRING) as id,\n  cast(`status`\
     \ as STRING) as status,\n/* cast(`startTime` as TIMESTAMP) as startTime,\n\
-    \  cast(`endTime` as TIMESTAMP) as endTime,*/\n  cast('2023-12-01T09:00:00'\
-    \ as TIMESTAMP) as startTime,\n  cast('2023-12-06T09:00:00' as TIMESTAMP) as\
-    \ endTime,\n  cast(`title` as STRING) as title,\n  'WMT:VAL' as rootLocation,\n\
-    \ 'workmate' as source\n from\n  `workorder_oid_workmate`.`workorders`;\n"
+    \  cast(`endTime` as TIMESTAMP) as endTime,\n  NOTE!!! The two dates below\
+    \ just update all workorders to be from now \n  and into the future. 
This is\ + \ done for the sake of the demo data.\n */\n cast(current_date() as TIMESTAMP)\ + \ as startTime,\n cast(date_add(current_date(), 7) as TIMESTAMP) as endTime,\n\ + \ cast(`title` as STRING) as title,\n 'WMT:VAL' as rootLocation,\n 'workmate'\ + \ as source\n from\n `workorder_oid_workmate`.`workorders`;\n" schedule: externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities interval: 7 * * * * diff --git a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml index a356487d2..133bfb013 100644 --- a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml @@ -74,7 +74,7 @@ Transformation: \ PumpNumber,\n PumpHP,\n HighHeadShutOff,\n DesignPointHeadFT,\n DesignPointFlowGPM,\n\ \ LowHeadFT,\n LowHeadFlowGPM,\n PumpControl,\n PumpModel,\n Shape__Length,\n\ \ Enabled,\n DesignPointHeadFT,\n LowHeadFT,\n FacilityID,\n InstallDate,\n\ - \ LifeCycleStatus,\n LocationDescription\n ) as metadata\nfrom pump_assets.`collections_pump`\n" + \ LifeCycleStatus,\n LocationDescription\n ) as metadata\nfrom `pump_assets`.`collections_pump`\n" schedule: externalId: pump_asset_hierarchy-load-collections_pump interval: 7 * * * * From 07b9a7883c388ac39747b6ff845ff38ae55b4e76 Mon Sep 17 00:00:00 2001 From: Anders Albert <60234212+doctrino@users.noreply.github.com> Date: Thu, 14 Dec 2023 17:19:21 +0100 Subject: [PATCH 68/90] Set client name for logging (#221) * refactor: Set client name for logging * tests: updated tests --- cognite_toolkit/cdf.py | 18 +++--------------- cognite_toolkit/cdf_tk/utils.py | 11 ++++------- demo/postproc.py | 2 +- tests/test_cdf_tk/test_utils.py | 9 ++++----- 4 files changed, 12 insertions(+), 28 deletions(-) diff --git a/cognite_toolkit/cdf.py b/cognite_toolkit/cdf.py index 634e7814a..7ffb7b1cb 100755 --- a/cognite_toolkit/cdf.py +++ b/cognite_toolkit/cdf.py @@ -258,11 +258,7 @@ def deploy( if ctx.obj.mockToolGlobals is not None: ToolGlobals = ctx.obj.mockToolGlobals else: - ToolGlobals = CDFToolConfig( - client_name="cdf-project-templates", - cluster=ctx.obj.cluster, - project=ctx.obj.project, - ) + ToolGlobals = CDFToolConfig(cluster=ctx.obj.cluster, project=ctx.obj.project) # Set environment variables from local.yaml read_environ_config(root_dir=build_dir, build_env=build_env, set_env_only=True) @@ -383,11 +379,7 @@ def clean( if ctx.obj.mockToolGlobals is not None: ToolGlobals = ctx.obj.mockToolGlobals else: - ToolGlobals = CDFToolConfig( - client_name="cdf-project-templates", - cluster=ctx.obj.cluster, - project=ctx.obj.project, - ) + ToolGlobals = CDFToolConfig(cluster=ctx.obj.cluster, project=ctx.obj.project) # Set environment variables from local.yaml read_environ_config(root_dir=build_dir, build_env=build_env, set_env_only=True) @@ -522,11 +514,7 @@ def auth_verify( if ctx.obj.mockToolGlobals is not None: ToolGlobals = ctx.obj.mockToolGlobals else: - ToolGlobals = CDFToolConfig( - client_name="cdf-project-templates", - cluster=ctx.obj.cluster, - project=ctx.obj.project, - ) + ToolGlobals = CDFToolConfig(cluster=ctx.obj.cluster, project=ctx.obj.project) bootstrap.check_auth( ToolGlobals, group_file=group_file, diff --git a/cognite_toolkit/cdf_tk/utils.py b/cognite_toolkit/cdf_tk/utils.py index 7306f812f..04e31e909 100644 --- a/cognite_toolkit/cdf_tk/utils.py +++ b/cognite_toolkit/cdf_tk/utils.py @@ -39,6 +39,7 @@ from cognite.client.utils._text import 
to_camel_case, to_snake_case from rich import print +from cognite_toolkit._version import __version__ from cognite_toolkit.cdf_tk._get_type_hints import _TypeHints logger = logging.getLogger(__name__) @@ -55,13 +56,7 @@ class CDFToolConfig: """ - def __init__( - self, - client_name: str = "Generic Cognite config deploy tool", - token: str | None = None, - cluster: str | None = None, - project: str | None = None, - ) -> None: + def __init__(self, token: str | None = None, cluster: str | None = None, project: str | None = None) -> None: self._data_set_id: int = 0 self._data_set = None self._failed = False @@ -74,6 +69,8 @@ def __init__( client_secret="", scopes=[], ) + # ClientName is used for logging usage of the CDF-Toolkit. + client_name = f"CDF-Toolkit:{__version__}" # CDF_CLUSTER and CDF_PROJECT are minimum requirements and can be overridden # when instansiating the class. diff --git a/demo/postproc.py b/demo/postproc.py index f82d03323..475a58d92 100755 --- a/demo/postproc.py +++ b/demo/postproc.py @@ -18,7 +18,7 @@ def run() -> None: print("Doing post-processing activities for demo project...") - ToolGlobals = CDFToolConfig(client_name="cdf-project-templates") + ToolGlobals = CDFToolConfig() try: print("Running tr_asset_oid_workmate_asset_hierarchy...") ToolGlobals.client.transformations.run(transformation_external_id="tr_asset_oid_workmate_asset_hierarchy") diff --git a/tests/test_cdf_tk/test_utils.py b/tests/test_cdf_tk/test_utils.py index d1106f515..4e9761cc3 100644 --- a/tests/test_cdf_tk/test_utils.py +++ b/tests/test_cdf_tk/test_utils.py @@ -33,28 +33,27 @@ DATA_FOLDER = THIS_FOLDER / "load_data" -def mocked_init(self, client_name: str): - self._client_name = client_name +def mocked_init(self): self._client = CogniteClientMock() self._data_set_id_by_external_id = {} def test_init(): with patch.object(CDFToolConfig, "__init__", mocked_init): - instance = CDFToolConfig(client_name="cdf-project-templates") + instance = CDFToolConfig() assert isinstance(instance._client, CogniteClientMock) def test_dataset_missing_acl(): with patch.object(CDFToolConfig, "__init__", mocked_init): with pytest.raises(CogniteAuthError): - instance = CDFToolConfig(client_name="cdf-project-templates") + instance = CDFToolConfig() instance.verify_dataset("test") def test_dataset_create(): with patch.object(CDFToolConfig, "__init__", mocked_init): - instance = CDFToolConfig(client_name="cdf-project-templates") + instance = CDFToolConfig() instance._client.config.project = "cdf-project-templates" instance._client.iam.token.inspect = Mock( spec=TokenAPI.inspect, From fef2ba9f936f8d102f3f567caaaa29d81c92a37f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 10:51:15 +0100 Subject: [PATCH 69/90] Restoring environments.yaml after accidentally checking in a local version --- cognite_toolkit/environments.yaml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/cognite_toolkit/environments.yaml b/cognite_toolkit/environments.yaml index 946d8b854..ca6ed0597 100644 --- a/cognite_toolkit/environments.yaml +++ b/cognite_toolkit/environments.yaml @@ -24,10 +24,14 @@ demo: - cdf_demo_infield - cdf_oid_example_data local: - project: trial-572dca111144a5196a6b1 + project: type: dev deploy: - - cdf_data_pipeline_asset_valhall + - cdf_auth_readwrite_all + - cdf_apm_base + - cdf_oid_example_data + - cdf_infield_common + - cdf_infield_location dev: project: type: dev @@ -43,5 +47,4 @@ prod: project: type: prod deploy: - - cdf_infield - + - cdf_infield \ No 
newline at end of file
From f296ab8e16ef5a774cab336e037c4c6d2b91363d Mon Sep 17 00:00:00 2001
From: Anders Albert <60234212+doctrino@users.noreply.github.com>
Date: Fri, 15 Dec 2023 11:47:59 +0100
Subject: [PATCH 70/90] [CDF-20466] Dry run Test to Check that Verify_Dataset is not called with --dry-run flag (#217)

* tests: Moved out init fixture
* tests: Added test to ensure no verify calls in dry run
* tests: Rewrote ApprovalClient
* refactor: ensure no creation and delete calls in dry run
* refactor: finish dry run test
* tests: Updated test data
* tests: Handle sub apis when checking for mocked
* tests: Mock extraction pipeline config
* tests: regen test data
* tests: allow list in APIResource
* tests: documented the new Approval Client
* docs: error message
* refactor: removed unused
---
 tests/conftest.py | 1009 +++++++++++------
 tests/test_approval_modules.py | 90 +-
 .../cdf_data_pipeline_asset_valhall.yaml | 34 +
 .../cdf_data_pipeline_asset_valhall.yaml | 2 +
 tests/test_cdf_tk/test_load.py | 16 +-
 5 files changed, 795 insertions(+), 356 deletions(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index 7caebaa9c..fdfb4beb1 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,393 +1,736 @@
 from __future__ import annotations
 
 import hashlib
-import inspect
 import itertools
 from collections import defaultdict
-from collections.abc import MutableSequence, Sequence
+from collections.abc import Sequence
+from dataclasses import dataclass
 from pathlib import Path
-from typing import Any
+from typing import Any, Callable, Literal, cast
 from unittest.mock import MagicMock
 
 import pandas as pd
 import pytest
 from cognite.client import CogniteClient
-from cognite.client._api.data_modeling.containers import ContainersAPI
-from cognite.client._api.data_modeling.data_models import DataModelsAPI
-from cognite.client._api.data_modeling.graphql import DataModelingGraphQLAPI
-from cognite.client._api.data_modeling.instances import InstancesAPI
-from cognite.client._api.data_modeling.spaces import SpacesAPI
-from cognite.client._api.data_modeling.views import ViewsAPI
-from cognite.client._api.data_sets import DataSetsAPI
-from cognite.client._api.datapoints import DatapointsAPI
-from cognite.client._api.files import FilesAPI
-from cognite.client._api.iam import GroupsAPI
-from cognite.client._api.raw import RawDatabasesAPI, RawRowsAPI, RawTablesAPI
-from cognite.client._api.time_series import TimeSeriesAPI
-from cognite.client._api.transformations import TransformationsAPI, TransformationSchedulesAPI
-from cognite.client._api_client import APIClient
 from cognite.client.data_classes import (
     Database,
     DatabaseList,
+    Datapoints,
     DatapointsList,
+    DataSet,
     DataSetList,
+    ExtractionPipeline,
+    ExtractionPipelineConfig,
+    ExtractionPipelineList,
+    FileMetadata,
     FileMetadataList,
+    Group,
     GroupList,
+    Row,
     RowList,
+    Table,
     TableList,
+    TimeSeries,
     TimeSeriesList,
+    Transformation,
     TransformationList,
+    TransformationSchedule,
     TransformationScheduleList,
 )
 from cognite.client.data_classes._base import CogniteResource, CogniteResourceList
 from cognite.client.data_classes.data_modeling import (
+    Container,
+    ContainerApply,
     ContainerApplyList,
     ContainerList,
+    DataModel,
+    DataModelApply,
     DataModelApplyList,
     DataModelList,
+    Node,
+    NodeApply,
     NodeApplyList,
     NodeList,
+    Space,
+    SpaceApply,
     SpaceApplyList,
     SpaceList,
     VersionedDataModelingId,
+    View,
+    ViewApply,
     ViewApplyList,
     ViewList,
 )
 from cognite.client.data_classes.data_modeling.ids import EdgeId, InstanceId, NodeId
-from
cognite.client.testing import monkeypatch_cognite_client +from cognite.client.testing import CogniteClientMock, monkeypatch_cognite_client TEST_FOLDER = Path(__file__).resolve().parent -@pytest.fixture -def cognite_client_approval() -> CogniteClient: - """ - Change directory to new_dir and return to the original directory when exiting the context. +class ApprovalCogniteClient: + """A mock CogniteClient that is used for testing the clean, deploy commands + of the cognite-toolkit. Args: - new_dir: The new directory to change to. + mock_client: The mock client to use. """ - with monkeypatch_cognite_client() as client: - written_resources: dict[str, Sequence[CogniteResource | dict[str, Any]]] = {} - deleted_resources: dict[str, list[str | int | dict[str, Any]]] = defaultdict(list) - created_resources: dict[str, list[CogniteResource]] = defaultdict(list) - client.iam.groups = create_mock_api( - client, GroupsAPI, GroupList, written_resources, deleted_resources, created_resources - ) - client.data_sets = create_mock_api( - client, DataSetsAPI, DataSetList, written_resources, deleted_resources, created_resources - ) - client.time_series = create_mock_api( - client, TimeSeriesAPI, TimeSeriesList, written_resources, deleted_resources, created_resources - ) - client.raw.databases = create_mock_api( - client, RawDatabasesAPI, DatabaseList, written_resources, deleted_resources, created_resources - ) - client.raw.tables = create_mock_api( - client, RawTablesAPI, TableList, written_resources, deleted_resources, created_resources - ) - client.transformations = create_mock_api( - client, TransformationsAPI, TransformationList, written_resources, deleted_resources, created_resources - ) - client.transformations.schedules = create_mock_api( - client, - TransformationSchedulesAPI, - TransformationScheduleList, - written_resources, - deleted_resources, - created_resources, - ) - client.data_modeling.containers = create_mock_api( - client, - ContainersAPI, - ContainerList, - written_resources, - deleted_resources, - created_resources, - ContainerApplyList, - ) - client.data_modeling.views = create_mock_api( - client, ViewsAPI, ViewList, written_resources, deleted_resources, created_resources, ViewApplyList - ) - client.data_modeling.data_models = create_mock_api( - client, - DataModelsAPI, - DataModelList, - written_resources, - deleted_resources, - created_resources, - DataModelApplyList, - ) - client.data_modeling.spaces = create_mock_api( - client, SpacesAPI, SpaceList, written_resources, deleted_resources, created_resources, SpaceApplyList - ) - client.raw.rows = create_mock_api( - client, RawRowsAPI, RowList, written_resources, deleted_resources, created_resources - ) - client.time_series.data = create_mock_api( - client, DatapointsAPI, DatapointsList, written_resources, deleted_resources, created_resources - ) - client.files = create_mock_api( - client, FilesAPI, FileMetadataList, written_resources, deleted_resources, created_resources - ) - client.data_modeling.graphql = create_mock_api( - client, - DataModelingGraphQLAPI, - DataModelList, - written_resources, - deleted_resources, - created_resources, - DataModelApplyList, - ) - client.data_modeling.instances = create_mock_api( - client, InstancesAPI, NodeList, written_resources, deleted_resources, created_resources, NodeApplyList - ) - - def dump() -> dict[str, Any]: - dumped = {} - for key in sorted(written_resources): - values = written_resources[key] - if values: - dumped[key] = sorted( - [value.dump(camel_case=True) if hasattr(value, "dump") else 
value for value in values], - key=lambda x: x.get("externalId", x.get("dbName", x.get("db_name", x.get("name")))), - ) - if deleted_resources: - dumped["deleted"] = {} - for key in sorted(deleted_resources): - values = deleted_resources[key] - - def sort_deleted(x): - if not isinstance(x, dict): - return x - if "externalId" in x: - return x["externalId"] - if "db_name" in x and "name" in x and isinstance(x["name"], list): - return x["db_name"] + "/" + x["name"][0] - return "missing" - - if values: - dumped["deleted"][key] = sorted( - values, - key=sort_deleted, + + def __init__(self, mock_client: CogniteClientMock): + self.mock_client = mock_client + # This is used to simulate the existing resources in CDF + self._existing_resources: dict[str, list[CogniteResource]] = defaultdict(list) + # This is used to log all delete operations + self._deleted_resources: dict[str, list[str | int | dict[str, Any]]] = defaultdict(list) + # This is used to log all create operations + self._created_resources: dict[str, list[CogniteResource | dict[str, Any]]] = defaultdict(list) + + # This is used to log all operations + self._delete_methods: dict[str, list[MagicMock]] = defaultdict(list) + self._create_methods: dict[str, list[MagicMock]] = defaultdict(list) + self._retrieve_methods: dict[str, list[MagicMock]] = defaultdict(list) + + # Setup all mock methods + for resource in _API_RESOURCES: + parts = resource.api_name.split(".") + mock_api = mock_client + for part in parts: + if not hasattr(mock_api, part): + raise ValueError(f"Invalid api name {resource.api_name}, could not find {part}") + mock_api = getattr(mock_api, part) + for method_type, methods in resource.methods.items(): + method_factory: Callable = { + "create": self._create_create_method, + "delete": self._create_delete_method, + "retrieve": self._create_retrieve_method, + }[method_type] + method_dict = { + "create": self._create_methods, + "delete": self._delete_methods, + "retrieve": self._retrieve_methods, + }[method_type] + for mock_method in methods: + if not hasattr(mock_api, mock_method.api_class_method): + raise ValueError( + f"Invalid api method {mock_method.api_class_method} for resource {resource.api_name}" ) + method = getattr(mock_api, mock_method.api_class_method) + method.side_effect = method_factory(resource, mock_method.mock_name, mock_client) + method_dict[resource.resource_cls.__name__].append(method) - return dumped + @property + def client(self) -> CogniteClient: + return cast(CogniteClient, self.mock_client) - client.dump = dump + def append(self, resource_cls: type[CogniteResource], items: CogniteResource | Sequence[CogniteResource]) -> None: + """This is used to simulate existing resources in CDF. - try: - yield client + Args: + resource_cls: The type of resource this is. + items: The list of resources to append. 
- finally: - written_resources.clear() + """ + if isinstance(items, Sequence): + self._existing_resources[resource_cls.__name__].extend(items) + else: + self._existing_resources[resource_cls.__name__].append(items) + + def _create_delete_method(self, resource: APIResource, mock_method: str, client: CogniteClient) -> Callable: + deleted_resources = self._deleted_resources + resource_cls = resource.resource_cls + + def delete_id_external_id( + id: int | Sequence[int] | None = None, + external_id: str | Sequence[str] | None = None, + **_, + ) -> list: + deleted = [] + if not isinstance(id, str) and isinstance(id, Sequence): + deleted.extend({"id": i} for i in id) + elif isinstance(id, int): + deleted.append({"id": id}) + if isinstance(external_id, str): + deleted.append({"externalId": external_id}) + elif isinstance(external_id, Sequence): + deleted.extend({"externalId": i} for i in external_id) + if deleted: + deleted_resources[resource_cls.__name__].extend(deleted) + return deleted + + def delete_data_modeling(ids: VersionedDataModelingId | Sequence[VersionedDataModelingId]) -> list: + deleted = [] + if isinstance(ids, (VersionedDataModelingId, InstanceId)): + deleted.append(ids.dump(camel_case=True)) + elif isinstance(ids, Sequence): + deleted.extend([id.dump(camel_case=True) for id in ids]) + if deleted: + deleted_resources[resource_cls.__name__].extend(deleted) + return deleted + + def delete_instances( + nodes: NodeId | Sequence[NodeId] | tuple[str, str] | Sequence[tuple[str, str]] | None = None, + edges: EdgeId | Sequence[EdgeId] | tuple[str, str] | Sequence[tuple[str, str]] | None = None, + ) -> list: + deleted = [] + if isinstance(nodes, NodeId): + deleted.append(nodes.dump(camel_case=True, include_instance_type=True)) + elif isinstance(nodes, tuple): + deleted.append(NodeId(*nodes).dump(camel_case=True, include_instance_type=True)) + elif isinstance(edges, EdgeId): + deleted.append(edges.dump(camel_case=True, include_instance_type=True)) + elif isinstance(edges, tuple): + deleted.append(EdgeId(*edges).dump(camel_case=True, include_instance_type=True)) + elif isinstance(nodes, Sequence): + deleted.extend( + [ + node.dump(camel_case=True, include_instance_type=True) if isinstance(node, NodeId) else node + for node in nodes + ] + ) + elif isinstance(edges, Sequence): + deleted.extend( + [ + edge.dump(camel_case=True, include_instance_type=True) if isinstance(edge, EdgeId) else edge + for edge in edges + ] + ) + if deleted: + deleted_resources[resource_cls.__name__].extend(deleted) + return deleted + + def delete_space(spaces: str | Sequence[str]) -> list: + deleted = [] + if isinstance(spaces, str): + deleted.append(spaces) + elif isinstance(spaces, Sequence): + deleted.extend(spaces) + if deleted: + deleted_resources[resource_cls.__name__].extend(deleted) + return deleted + + def delete_raw(db_name: str, name: str | Sequence[str]) -> list: + deleted = [{"db_name": db_name, "name": name if isinstance(name, str) else sorted(name)}] + deleted_resources[resource_cls.__name__].extend(deleted) + return deleted + + available_delete_methods = { + fn.__name__: fn + for fn in [ + delete_id_external_id, + delete_instances, + delete_raw, + delete_data_modeling, + delete_space, + ] + } + if mock_method not in available_delete_methods: + raise ValueError( + f"Invalid mock delete method {mock_method} for resource {resource_cls.__name__}. 
" + f"Supported {list(available_delete_methods)}" + ) -def create_mock_api( - client: CogniteClient, - api_client: type[APIClient], - read_list_cls: type[CogniteResourceList], - written_resources: dict[str, MutableSequence[CogniteResource | dict[str, Any]]], - deleted_resources: dict[str, list[str | int | dict[str, Any]]], - created_resources: dict[str, list[CogniteResource]], - write_list_cls: type[CogniteResourceList] | None = None, -) -> MagicMock: - resource_cls = read_list_cls._RESOURCE - write_list_cls = write_list_cls or read_list_cls - write_resource_cls = write_list_cls._RESOURCE + method = available_delete_methods[mock_method] + return method + + def _create_create_method(self, resource: APIResource, mock_method: str, client: CogniteClient) -> Callable: + created_resources = self._created_resources + write_resource_cls = resource.write_cls + write_list_cls = resource.write_list_cls + resource_cls = resource.resource_cls + + def create(*args, **kwargs) -> Any: + created = [] + for value in itertools.chain(args, kwargs.values()): + if isinstance(value, write_resource_cls): + created.append(value) + elif isinstance(value, Sequence) and all(isinstance(v, write_resource_cls) for v in value): + created.extend(value) + elif isinstance(value, str) and issubclass(write_resource_cls, Database): + created.append(Database(name=value)) + created_resources[resource_cls.__name__].extend(created) + return write_list_cls(created) + + def insert_dataframe(*args, **kwargs) -> None: + args = list(args) + kwargs = dict(kwargs) + dataframe_hash = "" + dataframe_cols = [] + for arg in list(args): + if isinstance(arg, pd.DataFrame): + args.remove(arg) + dataframe_hash = int( + hashlib.sha256(pd.util.hash_pandas_object(arg, index=True).values).hexdigest(), 16 + ) + dataframe_cols = list(arg.columns) + break + + for key in list(kwargs): + if isinstance(kwargs[key], pd.DataFrame): + value = kwargs.pop(key) + dataframe_hash = int( + hashlib.sha256(pd.util.hash_pandas_object(value, index=True).values).hexdigest(), 16 + ) + dataframe_cols = list(value.columns) + break + if not dataframe_hash: + raise ValueError("No dataframe found in arguments") + name = "_".join([str(arg) for arg in itertools.chain(args, kwargs.values())]) + if not name: + name = "_".join(dataframe_cols) + created_resources[resource_cls.__name__].append( + { + "name": name, + "args": args, + "kwargs": kwargs, + "dataframe": dataframe_hash, + "columns": dataframe_cols, + } + ) - written_resources[resource_cls.__name__] = write_list_cls([]) - mock = MagicMock(spec=api_client) + def upload(*args, **kwargs) -> None: + name = "" + for k, v in kwargs.items(): + if isinstance(v, Path) or (isinstance(v, str) and Path(v).exists()): + kwargs[k] = "/".join(Path(v).relative_to(TEST_FOLDER).parts) + name = Path(v).name + + created_resources[resource_cls.__name__].append( + { + "name": name, + "args": list(args), + "kwargs": dict(kwargs), + } + ) - def append(value: CogniteResource | Sequence[CogniteResource]) -> None: - if isinstance(value, Sequence): - created_resources[resource_cls.__name__].extend(value) - else: - created_resources[resource_cls.__name__].append(value) - - mock.append = append - - def return_values(*args, **kwargs): - return read_list_cls(created_resources[resource_cls.__name__], cognite_client=client) - - if hasattr(api_client, "list"): - mock.list = return_values - if hasattr(api_client, "retrieve"): - mock.retrieve = return_values - if hasattr(api_client, "retrieve_multiple"): - mock.retrieve_multiple = return_values - - def 
create(*args, **kwargs) -> Any: - created = [] - for value in itertools.chain(args, kwargs.values()): - if isinstance(value, write_resource_cls): - created.append(value) - elif isinstance(value, Sequence) and all(isinstance(v, write_resource_cls) for v in value): - created.extend(value) - elif isinstance(value, str) and issubclass(write_resource_cls, Database): - created.append(Database(name=value)) - written_resources[resource_cls.__name__].extend(created) - return write_list_cls(created) - - def insert_dataframe(*args, **kwargs) -> None: - args = list(args) - kwargs = dict(kwargs) - dataframe_hash = "" - dataframe_cols = [] - for arg in list(args): - if isinstance(arg, pd.DataFrame): - args.remove(arg) - dataframe_hash = int(hashlib.sha256(pd.util.hash_pandas_object(arg, index=True).values).hexdigest(), 16) - dataframe_cols = list(arg.columns) - break - - for key in list(kwargs): - if isinstance(kwargs[key], pd.DataFrame): - value = kwargs.pop(key) - dataframe_hash = int( - hashlib.sha256(pd.util.hash_pandas_object(value, index=True).values).hexdigest(), 16 - ) - dataframe_cols = list(value.columns) - break - if not dataframe_hash: - raise ValueError("No dataframe found in arguments") - name = "_".join([str(arg) for arg in itertools.chain(args, kwargs.values())]) - if not name: - name = "_".join(dataframe_cols) - written_resources[resource_cls.__name__].append( - { - "name": name, - "args": args, - "kwargs": kwargs, - "dataframe": dataframe_hash, - "columns": dataframe_cols, - } - ) - - def upload(*args, **kwargs) -> None: - name = "" - for k, v in kwargs.items(): - if isinstance(v, Path) or (isinstance(v, str) and Path(v).exists()): - kwargs[k] = "/".join(Path(v).relative_to(TEST_FOLDER).parts) - name = Path(v).name - - written_resources[resource_cls.__name__].append( - { - "name": name, - "args": list(args), - "kwargs": dict(kwargs), - } - ) - - def apply_dml(*args, **kwargs): - data = dict(kwargs) - data["args"] = list(args) - written_resources[resource_cls.__name__].append(data) - - def delete_core( - id: int | Sequence[int] | None = None, - external_id: str | Sequence[str] | None = None, - **_, - ) -> list: - deleted = [] - if not isinstance(id, str) and isinstance(id, Sequence): - deleted.extend({"id": i} for i in id) - elif isinstance(id, int): - deleted.append({"id": id}) - if isinstance(external_id, str): - deleted.append({"externalId": external_id}) - elif isinstance(external_id, Sequence): - deleted.extend({"externalId": i} for i in external_id) - if deleted: - deleted_resources[resource_cls.__name__].extend(deleted) - return deleted - - def delete_data_modeling(ids: VersionedDataModelingId | Sequence[VersionedDataModelingId]) -> list: - deleted = [] - if isinstance(ids, (VersionedDataModelingId, InstanceId)): - deleted.append(ids.dump(camel_case=True)) - elif isinstance(ids, Sequence): - deleted.extend([id.dump(camel_case=True) for id in ids]) - if deleted: - deleted_resources[resource_cls.__name__].extend(deleted) - return deleted - - def delete_instances( - nodes: NodeId | Sequence[NodeId] | tuple[str, str] | Sequence[tuple[str, str]] | None = None, - edges: EdgeId | Sequence[EdgeId] | tuple[str, str] | Sequence[tuple[str, str]] | None = None, - ) -> list: - deleted = [] - if isinstance(nodes, NodeId): - deleted.append(nodes.dump(camel_case=True, include_instance_type=True)) - elif isinstance(nodes, tuple): - deleted.append(NodeId(*nodes).dump(camel_case=True, include_instance_type=True)) - elif isinstance(edges, EdgeId): - deleted.append(edges.dump(camel_case=True, 
include_instance_type=True)) - elif isinstance(edges, tuple): - deleted.append(EdgeId(*edges).dump(camel_case=True, include_instance_type=True)) - elif isinstance(nodes, Sequence): - deleted.extend( - [ - node.dump(camel_case=True, include_instance_type=True) if isinstance(node, NodeId) else node - for node in nodes - ] + available_create_methods = {fn.__name__: fn for fn in [create, insert_dataframe, upload]} + if mock_method not in available_create_methods: + raise ValueError( + f"Invalid mock create method {mock_method} for resource {resource_cls.__name__}. Supported {available_create_methods.keys()}" ) - elif isinstance(edges, Sequence): - deleted.extend( - [ - edge.dump(camel_case=True, include_instance_type=True) if isinstance(edge, EdgeId) else edge - for edge in edges - ] + method = available_create_methods[mock_method] + return method + + def _create_retrieve_method(self, resource: APIResource, mock_method: str, client: CogniteClient) -> Callable: + existing_resources = self._existing_resources + resource_cls = resource.resource_cls + read_list_cls = resource.list_cls + + def return_values(*args, **kwargs): + return read_list_cls(existing_resources[resource_cls.__name__], cognite_client=client) + + def return_value(*args, **kwargs): + return read_list_cls(existing_resources[resource_cls.__name__], cognite_client=client)[0] + + available_retrieve_methods = { + fn.__name__: fn + for fn in [ + return_values, + return_value, + ] + } + if mock_method not in available_retrieve_methods: + raise ValueError( + f"Invalid mock retrieve method {mock_method} for resource {resource_cls.__name__}. Supported {available_retrieve_methods.keys()}" ) + method = available_retrieve_methods[mock_method] + return method + + def dump(self) -> dict[str, Any]: + """This returns a dictionary with all the resources that have been created and deleted. 
+ + Returns: + A dict with the resources that have been created and deleted, {resource_name: [resource, ...]} + """ + dumped = {} + for key in sorted(self._created_resources): + values = self._created_resources[key] + if values: + dumped[key] = sorted( + [value.dump(camel_case=True) if hasattr(value, "dump") else value for value in values], + key=lambda x: x.get("externalId", x.get("dbName", x.get("db_name", x.get("name")))), + ) + if self._deleted_resources: + dumped["deleted"] = {} + for key in sorted(self._deleted_resources): + values = self._deleted_resources[key] + + def sort_deleted(x): + if not isinstance(x, dict): + return x + if "externalId" in x: + return x["externalId"] + if "db_name" in x and "name" in x and isinstance(x["name"], list): + return x["db_name"] + "/" + x["name"][0] + return "missing" - if deleted: - deleted_resources[resource_cls.__name__].extend(deleted) - return deleted - - def delete_space(spaces: str | Sequence[str]) -> list: - deleted = [] - if isinstance(spaces, str): - deleted.append(spaces) - elif isinstance(spaces, Sequence): - deleted.extend(spaces) - if deleted: - deleted_resources[resource_cls.__name__].extend(deleted) - return deleted - - def delete_raw(db_name: str, name: str | Sequence[str]) -> list: - deleted = [{"db_name": db_name, "name": name if isinstance(name, str) else sorted(name)}] - deleted_resources[resource_cls.__name__].extend(deleted) - return deleted - - if hasattr(api_client, "create"): - mock.create = create - elif hasattr(api_client, "apply"): - mock.apply = create - - if hasattr(api_client, "upsert"): - mock.upsert = create - - if hasattr(api_client, "insert_dataframe"): - mock.insert_dataframe = insert_dataframe - - if hasattr(api_client, "upload"): - mock.upload = upload - - if hasattr(api_client, "apply_dml"): - mock.apply_dml = apply_dml - - if hasattr(api_client, "delete"): - signature = inspect.signature(api_client.delete) - if "ids" in signature.parameters: - mock.delete = delete_data_modeling - elif "nodes" in signature.parameters: - mock.delete = delete_instances - elif "spaces" in signature.parameters: - mock.delete = delete_space - elif "db_name" in signature.parameters: - mock.delete = delete_raw - else: - mock.delete = delete_core + if values: + dumped["deleted"][key] = sorted( + values, + key=sort_deleted, + ) + + return dumped + + def create_calls(self) -> dict[str, int]: + """This returns all the calls that have been made to the mock client to create methods. + + For example, if you have mocked the 'time_series' API, and the code you test calls the 'time_series.create' method, + then this method will return {'time_series': 1} + """ + return { + key: call_count + for key, methods in self._create_methods.items() + if (call_count := sum(method.call_count for method in methods)) + } + + def retrieve_calls(self) -> dict[str, int]: + """This returns all the calls that have been made to the mock client to retrieve methods. + + For example, if you have mocked the 'time_series' API, and the code you test calls the 'time_series.list' method, + then this method will return {'time_series': 1} + """ + return { + key: call_count + for key, methods in self._retrieve_methods.items() + if (call_count := sum(method.call_count for method in methods)) + } + + def delete_calls(self) -> dict[str, int]: + """This returns all the calls that have been made to the mock client to delete methods. 
+
+        For example, if you have mocked the 'time_series' API, and the code you test calls the 'time_series.delete' method,
+        then this method will return {'time_series': 1}
+        """
+        return {
+            key: call_count
+            for key, methods in self._delete_methods.items()
+            if (call_count := sum(method.call_count for method in methods))
+        }
+
+    def not_mocked_calls(self) -> dict[str, int]:
+        """This returns all the calls that have been made through the mock client to sub APIs that have not been mocked.
+
+        For example, if you have not mocked the 'time_series' API, and the code you test calls the 'time_series.list' method,
+        then this method will return {'time_series.list': 1}
+
+        Returns:
+            A dict with the calls that have been made to sub APIs that have not been mocked, {api_name.method_name: call_count}
+        """
+        mocked_apis: dict[str, set[str]] = defaultdict(set)
+        for r in _API_RESOURCES:
+            if r.api_name.count(".") == 1:
+                api_name, sub_api = r.api_name.split(".")
+            elif r.api_name.count(".") == 0:
+                api_name, sub_api = r.api_name, ""
+            else:
+                raise ValueError(f"Invalid api name {r.api_name}")
+            mocked_apis[api_name] |= {sub_api} if sub_api else set()
+
+        not_mocked: dict[str, int] = defaultdict(int)
+        for api_name, api in vars(self.mock_client).items():
+            if not isinstance(api, MagicMock) or api_name.startswith("_") or api_name.startswith("assert_"):
+                continue
+            mocked_sub_apis = mocked_apis.get(api_name, set())
+            for method_name in dir(api):
+                if method_name.startswith("_") or method_name.startswith("assert_"):
+                    continue
+                method = getattr(api, method_name)
+                if api_name not in mocked_apis and isinstance(method, MagicMock) and method.call_count:
+                    not_mocked[f"{api_name}.{method_name}"] += method.call_count
+                if hasattr(method, "_spec_set") and method._spec_set and method_name not in mocked_sub_apis:
+                    # This is a sub-API that must be checked as well.
+                    for sub_method_name in dir(method):
+                        if sub_method_name.startswith("_") or sub_method_name.startswith("assert_"):
+                            continue
+                        sub_method = getattr(method, sub_method_name)
+                        if isinstance(sub_method, MagicMock) and sub_method.call_count:
+                            not_mocked[f"{api_name}.{method_name}.{sub_method_name}"] += sub_method.call_count
+        return dict(not_mocked)
+
+
+@pytest.fixture
+def cognite_client_approval() -> ApprovalCogniteClient:
+    with monkeypatch_cognite_client() as client:
+        approval_client = ApprovalCogniteClient(client)
+        yield approval_client
+
+
+@dataclass
+class Method:
+    """Represent a method in the CogniteClient that should be mocked
+
+    Args:
+        api_class_method: The name of the method in the CogniteClient, for example, 'create', 'insert_dataframe'
+        mock_name: The name of the method in the ApprovalCogniteClient, for example, 'create', 'insert_dataframe'
+
+    The available mock methods can be found inside
+    * ApprovalCogniteClient._create_create_method,
+    * ApprovalCogniteClient._create_delete_method,
+    * ApprovalCogniteClient._create_retrieve_method
+
+    """
+
+    api_class_method: str
+    mock_name: str
+
+
+@dataclass
+class APIResource:
+    """This is used to define the resources that should be mocked in the ApprovalCogniteClient
+
+    Args:
+        api_name: The name of the resource in the CogniteClient, for example, 'time_series', 'data_modeling.views'
+        resource_cls: The resource class for the API
+        list_cls: The list resource API class
+        methods: The methods that should be mocked
+        _write_cls: The write resource class for the API. 
For example, the writing class for 'data_modeling.views' is 'ViewApply' + _write_list_cls: The write list class in the CogniteClient + + """ - return mock + api_name: str + resource_cls: type[CogniteResource] + list_cls: type[CogniteResourceList] | type[list] + methods: dict[Literal["create", "delete", "retrieve"], list[Method]] + + _write_cls: type[CogniteResource] | None = None + _write_list_cls: type[CogniteResourceList] | None = None + + @property + def write_cls(self) -> type[CogniteResource]: + return self._write_cls or self.resource_cls + + @property + def write_list_cls(self) -> type[CogniteResourceList]: + return self._write_list_cls or self.list_cls + + +# This is used to define the resources that should be mocked in the ApprovalCogniteClient +# You can add more resources here if you need to mock more resources +_API_RESOURCES = [ + APIResource( + api_name="iam.groups", + resource_cls=Group, + list_cls=GroupList, + methods={ + "create": [Method(api_class_method="create", mock_name="create")], + "delete": [Method(api_class_method="delete", mock_name="delete_id_external_id")], + "retrieve": [Method(api_class_method="list", mock_name="return_values")], + }, + ), + APIResource( + api_name="data_sets", + resource_cls=DataSet, + list_cls=DataSetList, + methods={ + "create": [Method(api_class_method="create", mock_name="create")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_values"), + Method(api_class_method="retrieve_multiple", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="time_series", + resource_cls=TimeSeries, + list_cls=TimeSeriesList, + methods={ + "create": [Method(api_class_method="create", mock_name="create")], + "delete": [Method(api_class_method="delete", mock_name="delete_id_external_id")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_values"), + Method(api_class_method="retrieve_multiple", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="raw.databases", + resource_cls=Database, + list_cls=DatabaseList, + methods={ + "create": [Method(api_class_method="create", mock_name="create")], + "retrieve": [Method(api_class_method="list", mock_name="return_values")], + "delete": [Method(api_class_method="delete", mock_name="delete_raw")], + }, + ), + APIResource( + api_name="raw.tables", + resource_cls=Table, + list_cls=TableList, + methods={ + "create": [Method(api_class_method="create", mock_name="create")], + "retrieve": [Method(api_class_method="list", mock_name="return_values")], + "delete": [Method(api_class_method="delete", mock_name="delete_raw")], + }, + ), + APIResource( + api_name="raw.rows", + resource_cls=Row, + list_cls=RowList, + methods={ + "create": [Method(api_class_method="insert_dataframe", mock_name="insert_dataframe")], + "delete": [Method(api_class_method="delete", mock_name="delete_raw")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="transformations", + resource_cls=Transformation, + list_cls=TransformationList, + methods={ + "create": [Method(api_class_method="create", mock_name="create")], + "delete": [Method(api_class_method="delete", mock_name="delete_id_external_id")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", 
mock_name="return_value"), + Method(api_class_method="retrieve_multiple", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="transformations.schedules", + resource_cls=TransformationSchedule, + list_cls=TransformationScheduleList, + methods={ + "create": [Method(api_class_method="create", mock_name="create")], + "delete": [Method(api_class_method="delete", mock_name="delete_id_external_id")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_value"), + Method(api_class_method="retrieve_multiple", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="extraction_pipelines", + resource_cls=ExtractionPipeline, + list_cls=ExtractionPipelineList, + methods={ + "create": [Method(api_class_method="create", mock_name="create")], + "delete": [Method(api_class_method="delete", mock_name="delete_id_external_id")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_value"), + Method(api_class_method="retrieve_multiple", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="extraction_pipelines.config", + resource_cls=ExtractionPipelineConfig, + list_cls=ExtractionPipelineConfig, + methods={ + "create": [Method(api_class_method="create", mock_name="create")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_value"), + ], + }, + ), + APIResource( + api_name="data_modeling.containers", + resource_cls=Container, + list_cls=ContainerList, + _write_cls=ContainerApply, + _write_list_cls=ContainerApplyList, + methods={ + "create": [Method(api_class_method="apply", mock_name="create")], + "delete": [Method(api_class_method="delete", mock_name="delete_data_modeling")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="data_modeling.views", + resource_cls=View, + list_cls=ViewList, + _write_cls=ViewApply, + _write_list_cls=ViewApplyList, + methods={ + "create": [Method(api_class_method="apply", mock_name="create")], + "delete": [Method(api_class_method="delete", mock_name="delete_data_modeling")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="data_modeling.data_models", + resource_cls=DataModel, + list_cls=DataModelList, + _write_cls=DataModelApply, + _write_list_cls=DataModelApplyList, + methods={ + "create": [Method(api_class_method="apply", mock_name="create")], + "delete": [Method(api_class_method="delete", mock_name="delete_data_modeling")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="data_modeling.spaces", + resource_cls=Space, + list_cls=SpaceList, + _write_cls=SpaceApply, + _write_list_cls=SpaceApplyList, + methods={ + "create": [Method(api_class_method="apply", mock_name="create")], + "delete": [Method(api_class_method="delete", mock_name="delete_space")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="time_series.data", + resource_cls=Datapoints, + 
list_cls=DatapointsList, + methods={ + "create": [ + Method(api_class_method="insert", mock_name="create"), + Method(api_class_method="insert_dataframe", mock_name="insert_dataframe"), + ], + }, + ), + APIResource( + api_name="files", + resource_cls=FileMetadata, + list_cls=FileMetadataList, + methods={ + "create": [Method(api_class_method="upload", mock_name="upload")], + "delete": [Method(api_class_method="delete", mock_name="delete_id_external_id")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_value"), + Method(api_class_method="retrieve_multiple", mock_name="return_values"), + ], + }, + ), + APIResource( + api_name="data_modeling.instances", + resource_cls=Node, + list_cls=NodeList, + _write_cls=NodeApply, + _write_list_cls=NodeApplyList, + methods={ + "create": [Method(api_class_method="apply", mock_name="create")], + "delete": [Method(api_class_method="delete", mock_name="delete_instances")], + "retrieve": [ + Method(api_class_method="list", mock_name="return_values"), + Method(api_class_method="retrieve", mock_name="return_values"), + ], + }, + ), +] diff --git a/tests/test_approval_modules.py b/tests/test_approval_modules.py index 06d364737..a48504154 100644 --- a/tests/test_approval_modules.py +++ b/tests/test_approval_modules.py @@ -15,12 +15,12 @@ import pytest import typer -from cognite.client import CogniteClient from pytest import MonkeyPatch from cognite_toolkit.cdf import Common, build, clean, deploy, main_init from cognite_toolkit.cdf_tk.templates import COGNITE_MODULES, iterate_modules, read_yaml_file, read_yaml_files from cognite_toolkit.cdf_tk.utils import CDFToolConfig +from tests.conftest import ApprovalCogniteClient REPO_ROOT = Path(__file__).parent.parent @@ -67,7 +67,7 @@ def local_tmp_project_path(local_tmp_path: Path): @pytest.fixture -def cdf_tool_config(cognite_client_approval: CogniteClient, monkeypatch: MonkeyPatch) -> CDFToolConfig: +def cdf_tool_config(cognite_client_approval: ApprovalCogniteClient, monkeypatch: MonkeyPatch) -> CDFToolConfig: monkeypatch.setenv("CDF_PROJECT", "pytest-project") monkeypatch.setenv("IDP_TOKEN_URL", "dummy") monkeypatch.setenv("IDP_CLIENT_ID", "dummy") @@ -76,8 +76,8 @@ def cdf_tool_config(cognite_client_approval: CogniteClient, monkeypatch: MonkeyP with chdir(REPO_ROOT): # Build must always be executed from root of the project cdf_tool = MagicMock(spec=CDFToolConfig) - cdf_tool.verify_client.return_value = cognite_client_approval - cdf_tool.verify_capabilities.return_value = cognite_client_approval + cdf_tool.verify_client.return_value = cognite_client_approval.mock_client + cdf_tool.verify_capabilities.return_value = cognite_client_approval.mock_client cdf_tool.failed = False cdf_tool.verify_dataset.return_value = 42 @@ -121,29 +121,75 @@ def fake_read_yaml_file( monkeypatch.setattr("cognite_toolkit.cdf_tk.templates.read_yaml_file", fake_read_yaml_file) +@pytest.fixture +def init_project(typer_context: typer.Context, local_tmp_project_path: Path) -> None: + main_init( + typer_context, + dry_run=False, + upgrade=False, + git=None, + init_dir=str(local_tmp_project_path), + no_backup=True, + clean=True, + ) + return None + + @pytest.mark.parametrize("module_path", list(find_all_modules())) def test_deploy_module_approval( module_path: Path, local_tmp_path: Path, local_tmp_project_path: Path, monkeypatch: MonkeyPatch, - cognite_client_approval: CogniteClient, + cognite_client_approval: ApprovalCogniteClient, cdf_tool_config: CDFToolConfig, 
typer_context: typer.Context, + init_project: None, data_regression, ) -> None: mock_read_yaml_files(module_path, monkeypatch) mock_read_yaml_file(module_path, monkeypatch) - main_init( + build( typer_context, - dry_run=False, - upgrade=False, - git=None, - init_dir=str(local_tmp_project_path), - no_backup=True, + source_dir=str(local_tmp_project_path), + build_dir=str(local_tmp_path), + build_env="dev", clean=True, ) + deploy( + typer_context, + build_dir=str(local_tmp_path), + build_env="dev", + interactive=False, + drop=True, + dry_run=False, + include=[], + ) + + not_mocked = cognite_client_approval.not_mocked_calls() + assert not not_mocked, ( + f"The following APIs have been called without being mocked: {not_mocked}, " + "Please update the list _API_RESOURCES in tests/conftest.py" + ) + + dump = cognite_client_approval.dump() + data_regression.check(dump, fullpath=SNAPSHOTS_DIR / f"{module_path.name}.yaml") + + +@pytest.mark.parametrize("module_path", list(find_all_modules())) +def test_deploy_dry_run_module_approval( + module_path: Path, + local_tmp_path: Path, + local_tmp_project_path: Path, + monkeypatch: MonkeyPatch, + cognite_client_approval: ApprovalCogniteClient, + cdf_tool_config: CDFToolConfig, + typer_context: typer.Context, + init_project: None, +) -> None: + mock_read_yaml_files(module_path, monkeypatch) + mock_read_yaml_file(module_path, monkeypatch) build( typer_context, @@ -158,12 +204,21 @@ def test_deploy_module_approval( build_env="dev", interactive=False, drop=True, - dry_run=False, + dry_run=True, include=[], ) - dump = cognite_client_approval.dump() - data_regression.check(dump, fullpath=SNAPSHOTS_DIR / f"{module_path.name}.yaml") + assert not ( + calls := cognite_client_approval.create_calls() + ), f"No resources should be created in dry run: got these calls: {calls}" + assert not ( + calls := cognite_client_approval.delete_calls() + ), f"No resources should be deleted in dry run: got these calls: {calls}" + assert cdf_tool_config.verify_dataset.call_count == 0, "Dataset should not be checked in dry run" + assert cdf_tool_config.verify_spaces.call_count == 0, "Spaces should not be checked in dry run" + assert ( + cdf_tool_config.verify_extraction_pipeline.call_count == 0 + ), "Extraction pipelines should not be checked in dry run" @pytest.mark.parametrize("module_path", list(find_all_modules())) @@ -172,7 +227,7 @@ def test_clean_module_approval( local_tmp_path: Path, local_tmp_project_path: Path, monkeypatch: MonkeyPatch, - cognite_client_approval: CogniteClient, + cognite_client_approval: ApprovalCogniteClient, cdf_tool_config: CDFToolConfig, typer_context: typer.Context, data_regression, @@ -206,5 +261,10 @@ def test_clean_module_approval( include=[], ) + not_mocked = cognite_client_approval.not_mocked_calls() + assert not not_mocked, ( + f"The following APIs have been called without being mocked: {not_mocked}, " + "Please update the list _API_RESOURCES in tests/conftest.py" + ) dump = cognite_client_approval.dump() data_regression.check(dump, fullpath=SNAPSHOTS_DIR_CLEAN / f"{module_path.name}.yaml") diff --git a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml index f568c481b..11de4f075 100644 --- a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml +++ b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml @@ -7,6 +7,38 @@ DataSet: transformations: '[{"externalId": "tr_asset_oid_workmate_asset_hierarchy", 
"type": "Transformations"}]' name: asset:oid +ExtractionPipeline: +- dataSetId: 42 + description: Asset source extraction pipeline with configuration for DB extractor + reading data from oid:workmate + documentation: "The DB Extractor is a general database extractor that connects to\ + \ a database, executes one or several queries and sends the result to CDF RAW.\n\ + \nThe extractor connects to a database over ODBC, which means that you need an\ + \ ODBC driver for your database. If you are running the Docker version of the\ + \ extractor, ODBC drivers for MySQL, MS SQL, PostgreSql and Oracle DB are preinstalled\ + \ in the image. See the example config for details on connection strings for these.\ + \ If you are running the Windows exe version of the extractor, you must provide\ + \ an ODBC driver yourself. These are typically provided by the database vendor.\n\ + \nFurther documentation is available [here](./docs/documentation.md)\n\nFor information\ + \ on development, consider the following guides:\n\n * [Development guide](guides/development.md)\n\ + \ * [Release guide](guides/release.md)" + externalId: ep_src_asset_oid_workmate + name: src:asset:oid:workmate + rawTables: + - dbName: asset_oid_workmate + tableName: assets + source: workmate +ExtractionPipelineConfig: +- config: "databases:\n- connection-string: DSN={MyPostgresDsn}\n name: postgres\n\ + \ type: odbc\nlogger:\n console:\n level: INFO\n file:\n \ + \ level: INFO\n path: file.log\nqueries:\n- database: postgres\n \ + \ destination:\n database: db-extractor\n table: postgres\n \ + \ type: raw\n incremental-field: id\n initial-start: 0\n name: test-postgres\n\ + \ primary-key: '{id}'\n query: \"SELECT\\n\\n *\\nFROM\\n\\n mytable\\\ + nWHERE\\n\\n {incremental_field} >= '{start_at}'\\n\\\n ORDER BY\\n\\\ + n {incremental_field} ASC\\n\"\n" + description: DB extractor config reading data from oid:workmate + externalId: ep_src_asset_oid_workmate Group: - capabilities: - rawAcl: @@ -127,5 +159,7 @@ TransformationSchedule: interval: 7 * * * * isPaused: true deleted: + ExtractionPipeline: + - externalId: ep_src_asset_oid_workmate Transformation: - externalId: tr_asset_oid_workmate_asset_hierarchy diff --git a/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml index d04057a23..7a44a713c 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml @@ -1,3 +1,5 @@ deleted: + ExtractionPipeline: + - externalId: ep_src_asset_oid_workmate Transformation: - externalId: tr_asset_oid_workmate_asset_hierarchy diff --git a/tests/test_cdf_tk/test_load.py b/tests/test_cdf_tk/test_load.py index bca592c9a..dd6f29810 100644 --- a/tests/test_cdf_tk/test_load.py +++ b/tests/test_cdf_tk/test_load.py @@ -2,7 +2,6 @@ from unittest.mock import MagicMock import pytest -from cognite.client import CogniteClient from cognite.client.data_classes import DataSet from cognite_toolkit.cdf_tk.load import ( @@ -13,6 +12,7 @@ deploy_or_clean_resources, ) from cognite_toolkit.cdf_tk.utils import CDFToolConfig +from tests.conftest import ApprovalCogniteClient THIS_FOLDER = Path(__file__).resolve().parent @@ -28,11 +28,11 @@ ], ) def test_loader_class( - loader_cls: type[Loader], directory: Path, cognite_client_approval: CogniteClient, data_regression + loader_cls: type[Loader], directory: Path, cognite_client_approval: 
ApprovalCogniteClient, data_regression
 ):
     cdf_tool = MagicMock(spec=CDFToolConfig)
-    cdf_tool.verify_client.return_value = cognite_client_approval
-    cdf_tool.verify_capabilities.return_value = cognite_client_approval
+    cdf_tool.verify_client.return_value = cognite_client_approval.mock_client
+    cdf_tool.verify_capabilities.return_value = cognite_client_approval.mock_client
     cdf_tool.data_set_id = 999
 
     deploy_or_clean_resources(
@@ -43,10 +43,10 @@ def test_loader_class(
     data_regression.check(dump, fullpath=SNAPSHOTS_DIR / f"{directory.name}.yaml")
 
 
-def test_upsert_data_set(cognite_client_approval: CogniteClient):
+def test_upsert_data_set(cognite_client_approval: ApprovalCogniteClient):
     cdf_tool = MagicMock(spec=CDFToolConfig)
-    cdf_tool.verify_client.return_value = cognite_client_approval
-    cdf_tool.verify_capabilities.return_value = cognite_client_approval
+    cdf_tool.verify_client.return_value = cognite_client_approval.mock_client
+    cdf_tool.verify_capabilities.return_value = cognite_client_approval.mock_client
 
     loader = DataSetsLoader.create_loader(cdf_tool)
     loaded = loader.load_resource(DATA_FOLDER / "data_sets" / "1.my_datasets.yaml", dry_run=False)
@@ -58,7 +58,7 @@ def test_upsert_data_set(cognite_client_approval: CogniteClient):
     first.created_time = 42
     first.last_updated_time = 42
     # Simulate that the data set is already in CDF
-    cognite_client_approval.data_sets.append(first)
+    cognite_client_approval.append(DataSet, first)
 
     changed = loader.remove_unchanged(loaded)
 
From b1793d35d63ff5c9d0b211fd1286d33e11d20bf5 Mon Sep 17 00:00:00 2001
From: Anders Albert <60234212+doctrino@users.noreply.github.com>
Date: Fri, 15 Dec 2023 11:55:09 +0100
Subject: [PATCH 71/90] CDF-toolkit version in environments.yaml (#223)

* feat: added cdf-toolkit version to environments.yaml
* style: fix some typos
* refactor: extended build command with dumping build env
* refactor: dump build environment
* refactor: raise error on mismatched version
* refactor: cleanup
* tests: updated test
* refactor: removed template version
* build: updated environments.yaml in demo
* build: changelog
* refactor: Handle edge case
* refactor: review feedback
---
 CHANGELOG.cdf-tk.md | 2 +
 CHANGELOG.templates.md | 16 +-
 cognite_toolkit/_version.py | 1 -
 cognite_toolkit/cdf.py | 25 +--
 cognite_toolkit/cdf_tk/templates.py | 288 +++++++++++-----------------
 cognite_toolkit/environments.yaml | 8 +-
 demo/environments.yaml | 5 +
 tests/test_approval_modules.py | 26 +--
 tests/test_build.py | 15 +-
 9 files changed, 163 insertions(+), 223 deletions(-)

diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md
index d5f2ea2fe..7e3aa013f 100644
--- a/CHANGELOG.cdf-tk.md
+++ b/CHANGELOG.cdf-tk.md
@@ -75,6 +75,8 @@ Changes are grouped as follows:
 - Missing .sql files for transformations will now raise an error in the build step.
 - The build step will now raise a number of warnings for missing externalIds in the yaml
   files, as well as if the naming conventions are not followed.
+- System section in `environments.yaml` to track local state of `cdf-toolkit`.
+- Introduced a `build_environment.yaml` in the `/build` folder to track how the build was run. 
### Fixed diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index eac0f2b7d..9fc21b00d 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -26,6 +26,9 @@ Changes are grouped as follows: ### Changed +- **BREAKING** All externalIds and names have been changed to follow the naming conventions for resources + in `examples/cdf_oid_example_data`, `examples/cdf_apm_simple_data_model`, `modules/cdf_apm_base`, + `modules/cdf_infield_common`, and `modules/cdf_infield_location`. - All cognite templates have been moved into `cognite_templates` folder, while `local_templates` is renamed to `custom_templates`. - Move cdf_apm_base into separate folder. - The file `local.yaml` has been renamed `environments.yaml` to better reflect its purpose. @@ -39,15 +42,8 @@ Changes are grouped as follows: - Removed transformation identity provider variables from modules and reused the global cicd_ prefixed ones. -## [0.2.0] - 2023-12-01 - -### Changed - -- **BREAKING** All externalIds and names have been changed to follow the naming conventions for resources - in `examples/cdf_oid_example_data`, `examples/cdf_apm_simple_data_model`, `modules/cdf_apm_base`, - `modules/cdf_infield_common`, and `modules/cdf_infield_location`. -## [0.1.2] - 2023-11-29 +## [0.1.0a3] - 2023-11-29 ### Changed @@ -61,7 +57,7 @@ Changes are grouped as follows: - Fix wrong reference to `apm_simple` in `examples/cdf_apm_simple_data_model` and `modules/cdf_infield_location`. - Exemplify use of a single config yaml file for multiple file resources in `examples/cdf_oid_example_data/files/files.yaml`. -## [0.1.1] - 2023-11-23 +## [0.1.0a2] - 2023-11-23 ### Changed @@ -76,6 +72,6 @@ Changes are grouped as follows: - cdf_infield_common module and the auth applications-configuration.yaml did not load group source id correctly due to source_id being used instead of sourceId. This is now fixed. -## [0.1.0] - 2023-11-21 +## [0.1.0a1] - 2023-11-21 Initial release diff --git a/cognite_toolkit/_version.py b/cognite_toolkit/_version.py index 9ee9590b7..1822f2d18 100644 --- a/cognite_toolkit/_version.py +++ b/cognite_toolkit/_version.py @@ -1,2 +1 @@ __version__ = "0.1.0a3" -__template_version__ = "0.2.0" diff --git a/cognite_toolkit/cdf.py b/cognite_toolkit/cdf.py index 7ffb7b1cb..a014c510e 100755 --- a/cognite_toolkit/cdf.py +++ b/cognite_toolkit/cdf.py @@ -17,8 +17,6 @@ from cognite_toolkit import _version from cognite_toolkit.cdf_tk import bootstrap - -# from scripts.delete import clean_out_datamodels from cognite_toolkit.cdf_tk.load import ( LOADER_BY_FOLDER_NAME, AuthLoader, @@ -26,13 +24,15 @@ deploy_or_clean_resources, ) from cognite_toolkit.cdf_tk.templates import ( + BUILD_ENVIRONMENT_FILE, COGNITE_MODULES, CONFIG_FILE, CUSTOM_MODULES, ENVIRONMENTS_FILE, + BuildEnvironment, build_config, generate_config, - read_environ_config, + read_yaml_file, ) from cognite_toolkit.cdf_tk.utils import CDFToolConfig @@ -43,7 +43,7 @@ app.add_typer(auth_app, name="auth") -_AVAILABLE_DATA_TYPES: tuple[str] = tuple(LOADER_BY_FOLDER_NAME) +_AVAILABLE_DATA_TYPES: tuple[str, ...] 
= tuple(LOADER_BY_FOLDER_NAME) # Common parameters handled in common callback @@ -58,7 +58,7 @@ class Common: def _version_callback(value: bool): if value: - typer.echo(f"CDF-Toolkit version: {_version.__version__}, Template version: {_version.__template_version__}") + typer.echo(f"CDF-Toolkit version: {_version.__version__}.") raise typer.Exit() @@ -190,13 +190,15 @@ def build( f"\n[bold]Environment file:[/] {environment_file.absolute().relative_to(Path.cwd())!s} and [bold]config file:[/] {config_file.absolute().relative_to(Path.cwd())!s}" ) ) + print(f" Environment is {build_env}, using that section in {ENVIRONMENTS_FILE!s}.\n") + build_ = BuildEnvironment.load(read_yaml_file(environment_file), build_env, "build") + build_.set_environment_variables() build_config( build_dir=Path(build_dir), source_dir=source_dir, config_file=config_file, - environment_file=environment_file, - build_env=build_env, + build=build_, clean=clean, verbose=ctx.obj.verbose, ) @@ -259,8 +261,9 @@ def deploy( ToolGlobals = ctx.obj.mockToolGlobals else: ToolGlobals = CDFToolConfig(cluster=ctx.obj.cluster, project=ctx.obj.project) - # Set environment variables from local.yaml - read_environ_config(root_dir=build_dir, build_env=build_env, set_env_only=True) + + build_ = BuildEnvironment.load(read_yaml_file(Path(build_dir) / BUILD_ENVIRONMENT_FILE), build_env, "deploy") + build_.set_environment_variables() print(Panel(f"[bold]Deploying config files from {build_dir} to environment {build_env}...[/]")) build_path = Path(build_dir) @@ -381,8 +384,8 @@ def clean( else: ToolGlobals = CDFToolConfig(cluster=ctx.obj.cluster, project=ctx.obj.project) - # Set environment variables from local.yaml - read_environ_config(root_dir=build_dir, build_env=build_env, set_env_only=True) + build_ = BuildEnvironment.load(read_yaml_file(Path(build_dir) / BUILD_ENVIRONMENT_FILE), build_env, "clean") + build_.set_environment_variables() Panel(f"[bold]Cleaning environment {build_env} based on config files from {build_dir}...[/]") build_path = Path(build_dir) diff --git a/cognite_toolkit/cdf_tk/templates.py b/cognite_toolkit/cdf_tk/templates.py index 26ac48242..75958573a 100644 --- a/cognite_toolkit/cdf_tk/templates.py +++ b/cognite_toolkit/cdf_tk/templates.py @@ -13,6 +13,7 @@ import yaml from rich import print +from cognite_toolkit import _version from cognite_toolkit.cdf_tk.load import LOADER_BY_FOLDER_NAME from cognite_toolkit.cdf_tk.utils import validate_case_raw, validate_config_yaml, validate_data_set_is_set @@ -20,6 +21,7 @@ DEFAULT_CONFIG_FILE = "default.config.yaml" # The environment file: ENVIRONMENTS_FILE = "environments.yaml" +BUILD_ENVIRONMENT_FILE = "build_environment.yaml" # The local config file: CONFIG_FILE = "config.yaml" # The default package files @@ -29,7 +31,6 @@ COGNITE_MODULES = "cognite_modules" CUSTOM_MODULES = "custom_modules" -TMPL_DIRS = ["common", "modules", "local_modules", "examples", "experimental"] # Add any other files below that should be included in a build EXCL_FILES = ["README.md", DEFAULT_CONFIG_FILE] # Which suffixes to exclude when we create indexed files (i.e., they are bundled with their main config file) @@ -38,112 +39,108 @@ PROC_TMPL_VARS_SUFFIX = frozenset([".yaml", ".yml", ".sql", ".csv", ".parquet", ".json", ".txt", ".md", ".html", ".py"]) -def read_environ_config( - root_dir: str = "./", - build_env: str = "dev", - tmpl_dirs: [str] = TMPL_DIRS, - set_env_only: bool = False, - verbose: bool = False, -) -> list[str]: - """Read the global configuration files and return a list of modules in 
correct order. +@dataclass +class BuildEnvironment: + name: Literal["dev", "local", "demo", "staging", "prod"] + project: str + build_type: str + deploy: list[str] + system: SystemVariables + + @classmethod + def load( + cls, environment_config: dict[str, Any], build_env: str, action: Literal["build", "deploy", "clean"] + ) -> BuildEnvironment: + if build_env is None: + raise ValueError("build_env must be specified") + environment = environment_config.get(build_env) + if environment is None: + raise ValueError(f"Environment {build_env} not found in {ENVIRONMENTS_FILE!s}") + system = SystemVariables.load(environment_config, action) + try: + return BuildEnvironment( + name=build_env, + project=environment["project"], + build_type=environment["type"], + deploy=environment["deploy"], + system=system, + ) + except KeyError: + print( + f" [bold red]ERROR:[/] Environment {build_env} is missing required fields 'project', 'type', or 'deploy' in {ENVIRONMENTS_FILE!s}" + ) + exit(1) - The presence of a module directory in tmpl_dirs is verified. - Yields: - List of modules in the order they should be processed. - Exception(ValueError) if a module is not found in tmpl_dirs. - """ - if not root_dir.endswith("/"): - root_dir = root_dir + "/" - tmpl_dirs = [root_dir + t for t in tmpl_dirs] - global_config = read_yaml_files(root_dir, "default.packages.yaml") - packages = global_config.get("packages", {}) - packages.update(read_yaml_files(root_dir, "packages.yaml").get("packages", {})) - environment_config = read_yaml_files(root_dir, ENVIRONMENTS_FILE) - - print(f" Environment is {build_env}, using that section in {ENVIRONMENTS_FILE}.\n") - if verbose: - print(" [bold green]INFO:[/] Found defined packages:") - for name, content in packages.items(): - print(f" {name}: {content}") - modules = [] - if len(environment_config) == 0: - return [] - try: - defs = environment_config[build_env] - except KeyError: - print(f" [bold red]ERROR:[/] Environment {build_env} not found in {ENVIRONMENTS_FILE}") - exit(1) + def dump(self) -> dict[str, Any]: + return { + self.name: { + "project": self.project, + "type": self.build_type, + "deploy": self.deploy, + }, + "__system": { + "cdf_toolkit_version": self.system.cdf_toolkit_version, + }, + } + + def dump_to_file(self, build_dir: Path) -> None: + (build_dir / BUILD_ENVIRONMENT_FILE).write_text(yaml.dump(self.dump(), sort_keys=False, indent=2)) + + def validate_environment(self): + if (project_env := os.environ.get("CDF_PROJECT", "")) != self.project: + if self.name in {"dev", "local", "demo"}: + print( + f" [bold yellow]WARNING:[/] Project name mismatch (CDF_PROJECT) between {ENVIRONMENTS_FILE!s} ({self.project}) and what is defined in environment ({project_env})." + ) + print(f" Environment is {self.name}, continuing (would have stopped for staging and prod)...") + else: + print( + f" [bold red]ERROR:[/] Project name mismatch (CDF_PROJECT) between {ENVIRONMENTS_FILE!s} ({self.project}) and what is defined in environment ({project_env=} != {self.project=})." + ) + exit(1) - os.environ["CDF_ENVIRON"] = build_env - for k, v in defs.items(): - if k == "project": - if os.environ.get("CDF_PROJECT", "") != v: - if build_env == "dev" or build_env == "local" or build_env == "demo": - print( - f" [bold yellow]WARNING:[/] Project name mismatch (CDF_PROJECT) between local.yaml ({v}) and what is defined in environment ({os.environ.get('CDF_PROJECT','')})." 
- ) - print(f" Environment is {build_env}, continuing (would have stopped for staging and prod)...") - else: - print( - f" [bold red]ERROR:[/]Project name mismatch (CDF_PROJECT) between local.yaml ({v}) and what is defined in environment ({os.environ['CDF_PROJECT']})." - ) - exit(1) - elif k == "type": - os.environ["CDF_BUILD_TYPE"] = v - elif k == "deploy": - print(f" [bold green]INFO:[/] Building module list for environment {build_env}...") - for m in v: - for g2, g3 in packages.items(): - if m == g2: - if verbose: - print(f" Including modules from package {m}: {g3}") - for m2 in g3: - if m2 not in modules: - modules.append(m2) - elif m not in modules and packages.get(m) is None: - if verbose: - print(f" Including explicitly defined module {m}") - modules.append(m) - if set_env_only: - return [] - if len(modules) == 0: - print( - f" [bold yellow]WARNING:[/] Found no defined modules in local.yaml, have you configured the environment ({build_env})?" - ) - load_list = [] - module_dirs = {} - for d in tmpl_dirs: - if not module_dirs.get(d): - module_dirs[d] = [] + def set_environment_variables(self): + os.environ["CDF_ENVIRON"] = self.name + os.environ["CDF_BUILD_TYPE"] = self.build_type + + +@dataclass +class SystemVariables: + cdf_toolkit_version: str + + @classmethod + def load(cls, data: dict[str, Any], action: Literal["build", "deploy", "clean"]) -> SystemVariables: + file_name = BUILD_ENVIRONMENT_FILE if action in {"deploy", "clean"} else ENVIRONMENTS_FILE try: - for dirnames in Path(d).iterdir(): - module_dirs[d].append(dirnames.name) - except Exception: - ... - for m in modules: - found = False - for dir, mod in module_dirs.items(): - if m in mod: - load_list.append(f"{dir}/{m}") - found = True - break - if not found: - raise ValueError(f"Module {m} not found in template directories {tmpl_dirs}.") - return load_list + system = SystemVariables(cdf_toolkit_version=data["__system"]["cdf_toolkit_version"]) + except KeyError: + print( + f" [bold red]ERROR:[/] System variables are missing required field 'cdf_toolkit_version' in {file_name!s}" + ) + if action in {"deploy", "clean"}: + print(f" rerun `cdf-tk build` to build the templates again and create `{file_name!s}` correctly.") + elif action == "build": + print( + f" run `cdf-tk init --upgrade` to initialize the templates again and create a correct `{file_name!s}` file." + ) + exit(1) + if system.cdf_toolkit_version != _version.__version__: + print( + f" [bold red]Error:[/] The version of the templates ({system.cdf_toolkit_version}) does not match the version of the installed package ({_version.__version__})." 
+ ) + print(" Please either run `cdf-tk init --upgrade` to upgrade the templates OR") + print(f" run `pip install cognite-toolkit==={system.cdf_toolkit_version}` to downgrade cdf-tk.") + exit(1) + return system def get_selected_modules( source_module: Path, - environment_file: Path, - build_env: str = "dev", + selected_module_and_packages: list[str], + build_env: str, verbose: bool = False, ) -> list[str]: - print(f" Environment is {build_env}, using that section in {ENVIRONMENTS_FILE!s}.\n") - modules_by_package = _read_packages(source_module, verbose) - - selected_module_and_packages = _get_modules_and_packages(environment_file, build_env) - selected_packages = [package for package in selected_module_and_packages if package in modules_by_package] if verbose: print(" [bold green]INFO:[/] Selected packages:") @@ -171,37 +168,6 @@ def get_selected_modules( exit(1) -def _get_modules_and_packages(environment_file: Path, build_env: str) -> list[str]: - environment_config = read_yaml_file(environment_file) - environment = environment_config.get(build_env) - if environment is None: - raise ValueError(f"Environment {build_env} not found in {ENVIRONMENTS_FILE!s}") - try: - project_config = environment["project"] - environment_type = environment["type"] - deploy = environment["deploy"] - except KeyError: - print( - f" [bold red]ERROR:[/] Environment {build_env} is missing required fields 'project', 'type', or 'deploy' in {ENVIRONMENTS_FILE!s}" - ) - exit(1) - - os.environ["CDF_ENVIRON"] = build_env - os.environ["CDF_BUILD_TYPE"] = environment_type - if (project_env := os.environ.get("CDF_PROJECT", "")) != project_config: - if build_env == "dev" or build_env == "local" or build_env == "demo": - print( - f" [bold yellow]WARNING:[/] Project name mismatch (CDF_PROJECT) between {ENVIRONMENTS_FILE!s} ({project_config}) and what is defined in environment ({project_env})." - ) - print(f" Environment is {build_env}, continuing (would have stopped for staging and prod)...") - else: - print( - f" [bold red]ERROR:[/] Project name mismatch (CDF_PROJECT) between {ENVIRONMENTS_FILE!s} ({project_config}) and what is defined in environment ({project_env=} != {project_config=})." - ) - exit(1) - return deploy - - def _read_packages(source_module, verbose): cdf_modules_by_packages = read_yaml_file(source_module / COGNITE_MODULES / DEFAULT_PACKAGES_FILE).get( "packages", {} @@ -225,44 +191,6 @@ def _read_packages(source_module, verbose): return modules_by_package -def read_yaml_files( - yaml_dirs: list[str] | str, - name: str | None = None, -) -> dict[str, Any]: - """Read all YAML files in the given directories and return a dictionary - - This function will not traverse into sub-directories. - - yaml_dirs: list of directories to read YAML files from - name: (optional) name of the file(s) to read, either filename or regex. Defaults to config.yaml and default.config.yaml - """ - - if isinstance(yaml_dirs, str): - yaml_dirs = [yaml_dirs] - files = [] - if name is None: - # Order is important! 
- for directory in yaml_dirs: - files.extend(Path(directory).glob("default.config.yaml")) - files.extend(Path(directory).glob("config.yaml")) - else: - name = re.compile(f"^{name}") - for directory in yaml_dirs: - for file in Path(directory).glob("*.yaml"): - if not (name.match(file.name)): - continue - files.append(file) - data = {} - for yaml_file in files: - try: - config_data = yaml.safe_load(yaml_file.read_text()) - except yaml.YAMLError as e: - print(f" [bold red]ERROR:[/] reading {yaml_file}: {e}") - continue - data.update(config_data) - return data - - @overload def read_yaml_file(filepath: Path, expected_output: Literal["dict"] = "dict") -> dict[str, Any]: ... @@ -467,25 +395,23 @@ def build_config( build_dir: Path, source_dir: Path, config_file: Path, - environment_file: Path, - build_env: str = "dev", + build: BuildEnvironment, clean: bool = False, verbose: bool = False, ): - if build_env is None: - raise ValueError("build_env must be specified") - if build_dir.exists(): - if any(build_dir.iterdir()): - if clean: - shutil.rmtree(build_dir) - build_dir.mkdir() - print(f" [bold green]INFO:[/] Cleaned existing build directory {build_dir!s}.") - else: - print(" [bold yellow]WARNING:[/] Build directory is not empty. Use --clean to remove existing files.") + is_populated = build_dir.exists() and any(build_dir.iterdir()) + if is_populated and clean: + shutil.rmtree(build_dir) + build_dir.mkdir() + print(f" [bold green]INFO:[/] Cleaned existing build directory {build_dir!s}.") + elif is_populated: + print(" [bold yellow]WARNING:[/] Build directory is not empty. Use --clean to remove existing files.") else: build_dir.mkdir() - selected_modules = get_selected_modules(source_dir, environment_file, build_env, verbose) + build.validate_environment() + + selected_modules = get_selected_modules(source_dir, build.deploy, build.name, verbose) config = read_yaml_file(config_file) warnings = validate_config_yaml(config, config_file) @@ -493,7 +419,9 @@ def build_config( print(" [bold yellow]WARNING:[/] Found the following warnings in config.yaml:") for warning in warnings: print(f" {warning}") - process_config_files(source_dir, selected_modules, build_dir, config, build_env, verbose) + process_config_files(source_dir, selected_modules, build_dir, config, build.name, verbose) + build.dump_to_file(build_dir) + print(f" [bold green]INFO:[/] Build complete. Files are located in {build_dir!s}.") def generate_config( diff --git a/cognite_toolkit/environments.yaml b/cognite_toolkit/environments.yaml index ca6ed0597..1057f19f7 100644 --- a/cognite_toolkit/environments.yaml +++ b/cognite_toolkit/environments.yaml @@ -47,4 +47,10 @@ prod: project: type: prod deploy: - - cdf_infield \ No newline at end of file + - cdf_infield + + +# DO NOT EDIT THE LINES BELOW! +# This part is used by cdf-toolkit to keep track of the version and help you upgrade. +__system: + cdf_toolkit_version: 0.1.0a3 diff --git a/demo/environments.yaml b/demo/environments.yaml index c63eebb3d..c1fd66f98 100644 --- a/demo/environments.yaml +++ b/demo/environments.yaml @@ -22,3 +22,8 @@ demo: - cdf_demo_infield - cdf_oid_example_data - cdf_data_pipeline_asset_valhall + +# DO NOT EDIT THE LINES BELOW! +# This part is used by cdf-toolkit to keep track of the version and help you upgrade. 
+__system: + cdf_toolkit_version: 0.1.0a3 diff --git a/tests/test_approval_modules.py b/tests/test_approval_modules.py index a48504154..7aba210ef 100644 --- a/tests/test_approval_modules.py +++ b/tests/test_approval_modules.py @@ -17,8 +17,9 @@ import typer from pytest import MonkeyPatch +from cognite_toolkit import _version from cognite_toolkit.cdf import Common, build, clean, deploy, main_init -from cognite_toolkit.cdf_tk.templates import COGNITE_MODULES, iterate_modules, read_yaml_file, read_yaml_files +from cognite_toolkit.cdf_tk.templates import COGNITE_MODULES, iterate_modules, read_yaml_file from cognite_toolkit.cdf_tk.utils import CDFToolConfig from tests.conftest import ApprovalCogniteClient @@ -98,27 +99,19 @@ def typer_context(cdf_tool_config: CDFToolConfig) -> typer.Context: return context -def mock_read_yaml_files(module_path: Path, monkeypatch: MonkeyPatch) -> None: - def fake_read_yaml_files( - yaml_dirs: list[str], - name: str | None = None, - ) -> dict[str, Any]: - if name == "local.yaml": - return {"dev": {"project": "pytest-project", "type": "dev", "deploy": [module_path.name]}} - return read_yaml_files(yaml_dirs, name) - - monkeypatch.setattr("cognite_toolkit.cdf_tk.templates.read_yaml_files", fake_read_yaml_files) - - def mock_read_yaml_file(module_path: Path, monkeypatch: MonkeyPatch) -> None: def fake_read_yaml_file( filepath: Path, expected_output: Literal["list", "dict"] = "dict" ) -> dict[str, Any] | list[dict[str, Any]]: if filepath.name == "environments.yaml": - return {"dev": {"project": "pytest-project", "type": "dev", "deploy": [module_path.name]}} + return { + "dev": {"project": "pytest-project", "type": "dev", "deploy": [module_path.name]}, + "__system": {"cdf_toolkit_version": _version.__version__}, + } return read_yaml_file(filepath, expected_output) monkeypatch.setattr("cognite_toolkit.cdf_tk.templates.read_yaml_file", fake_read_yaml_file) + monkeypatch.setattr("cognite_toolkit.cdf.read_yaml_file", fake_read_yaml_file) @pytest.fixture @@ -147,7 +140,6 @@ def test_deploy_module_approval( init_project: None, data_regression, ) -> None: - mock_read_yaml_files(module_path, monkeypatch) mock_read_yaml_file(module_path, monkeypatch) build( @@ -188,7 +180,6 @@ def test_deploy_dry_run_module_approval( typer_context: typer.Context, init_project: None, ) -> None: - mock_read_yaml_files(module_path, monkeypatch) mock_read_yaml_file(module_path, monkeypatch) build( @@ -232,7 +223,6 @@ def test_clean_module_approval( typer_context: typer.Context, data_regression, ) -> None: - mock_read_yaml_files(module_path, monkeypatch) mock_read_yaml_file(module_path, monkeypatch) main_init( @@ -255,7 +245,7 @@ def test_clean_module_approval( clean( typer_context, build_dir=str(local_tmp_path), - build_env="test", + build_env="dev", interactive=False, dry_run=False, include=[], diff --git a/tests/test_build.py b/tests/test_build.py index 900bccc77..e1474dca3 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -8,7 +8,7 @@ import yaml from packaging.version import Version -from cognite_toolkit._version import __template_version__, __version__ +from cognite_toolkit._version import __version__ from cognite_toolkit.cdf_tk.templates import generate_config from tests.constants import REPO_ROOT @@ -29,7 +29,7 @@ def test_pyproj_version_matches() -> None: @pytest.mark.parametrize( "package_version, changelog_name", - [(__version__, "CHANGELOG.cdf-tk.md"), (__template_version__, "CHANGELOG.templates.md")], + [(__version__, "CHANGELOG.cdf-tk.md"), (__version__, 
"CHANGELOG.templates.md")], ) def test_changelog_entry_version_matches(package_version: str, changelog_name: str) -> None: match = next(_parse_changelog(changelog_name)) @@ -82,6 +82,17 @@ def test_config_yaml_updated() -> None: ) +def test_environment_system_variables_updated() -> None: + environments_yaml = yaml.safe_load( + (REPO_ROOT / "cognite_toolkit" / "environments.yaml").read_text(encoding="utf-8") + ) + system_variables = environments_yaml["__system"] + + assert ( + system_variables["cdf_toolkit_version"] == __version__ + ), "The 'cdf_tk_version' system variable is not up to date." + + def _parse_changelog(changelog: str) -> Iterator[Match[str]]: changelog = (REPO_ROOT / changelog).read_text(encoding="utf-8") return re.finditer(r"##\s\[(\d+\.\d+\.\d+(a\d+)?)\]\s-\s(\d+-\d+-\d+)", changelog) From e2a1eef991c0c903dc07d7251872553cea01c412 Mon Sep 17 00:00:00 2001 From: Greger Teigre Wedel Date: Fri, 15 Dec 2023 12:43:47 +0100 Subject: [PATCH 72/90] CDF-20462: Add data sets to the infield_location module (#226) * Add data set to the module * Add changelog * Update tests --- CHANGELOG.cdf-tk.md | 2 +- CHANGELOG.templates.md | 2 ++ ...et.yaml.tmpl => location_source_data_set.yaml} | 13 ++++++------- .../cdf_infield_location.yaml | 15 +++++++++++++++ 4 files changed, 24 insertions(+), 8 deletions(-) rename cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/{location_source_data_set.yaml.tmpl => location_source_data_set.yaml} (72%) diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md index 7e3aa013f..950888f24 100644 --- a/CHANGELOG.cdf-tk.md +++ b/CHANGELOG.cdf-tk.md @@ -24,7 +24,7 @@ Changes are grouped as follows: - Check for whether template variables `` are present in the config files. - Check for whether data set id is present in the config files. - Print table at the end of `cdf-tk deploy` with the resources that were created, deleted, and skipped. -- Support for Extraction Pipelines and Extraction Pipeline configuration for remotely configured Extractors +- Support for Extraction Pipelines and Extraction Pipeline configuration for remotely configured Extractors ### Removed diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index 9fc21b00d..396a03c34 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -37,6 +37,8 @@ Changes are grouped as follows: - Combined the child and parent transformations `sync_assets_from_hierarchy_to_apm` in `cdf_infield_location`. This has the benefit of not having to wait for the parent transformation to finish before starting the child transformation, thus no longer a dependency between the two transformations. +- Added all datasets to the `cdf_infield_locaton` module that previously were just a template, but created in `cdf_oid_example_data`. + If both modules are used, the datasets will be attempted created twice, but this is not a problem. 
### Fixed

diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml.tmpl b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml
similarity index 72%
rename from cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml.tmpl
rename to cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml
index 1c681f407..d7d310381 100644
--- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml.tmpl
+++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml
@@ -1,18 +1,17 @@
-# NOTE!!! If you do not have an existing data set with asset data, you need to create one for
-# each Infield location. Rename this file to .yaml and change the configuration to match your data set.
-#
+# NOTE!!! These data sets are also created in the cdf_oid_example_data module to
+# ensure that they are created independently of whether the demo data are used.
 - externalId: ds_asset_{{default_location}}
   name: asset:{{default_location}}
   description: This dataset contains asset data for the {{default_location}} location.
-- externalId: ds_3d_{{default_location}}
-  name: 3d:{{default_location}}
-  description: This dataset contains 3D data for the {{default_location}} location.
 - externalId: ds_files_{{default_location}}
   name: files:{{default_location}}
   description: This dataset contains files for the {{default_location}} location.
 - externalId: ds_timeseries_{{default_location}}
   name: timeseries:{{default_location}}
-  description: This dataset contains timeseries data for the {{default_location}} location.
+  description: This dataset contains timeseries for the {{default_location}} location.
+- externalId: ds_3d_{{default_location}}
+  name: 3d:{{default_location}}
+  description: This dataset contains 3D data for the {{default_location}} location.
 - externalId: ds_relationships_{{default_location}}
   name: relationships:{{default_location}}
   description: This dataset contains relationships data for the {{default_location}} location.
diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml
index c969f3740..c60f6942f 100644
--- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml
+++ b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml
@@ -1,7 +1,22 @@
 DataSet:
+- description: This dataset contains 3D data for the oid location.
+  externalId: ds_3d_oid
+  name: 3d:oid
+- description: This dataset contains asset data for the oid location.
+  externalId: ds_asset_oid
+  name: asset:oid
+- description: This dataset contains files for the oid location.
+  externalId: ds_files_oid
+  name: files:oid
 - description: This dataset is for writing data from Infield for the location oid.
   externalId: ds_infield_oid_app_data
   name: infield:oid:app_data
+- description: This dataset contains relationships data for the oid location.
+  externalId: ds_relationships_oid
+  name: relationships:oid
+- description: This dataset contains timeseries for the oid location.
+  externalId: ds_timeseries_oid
+  name: timeseries:oid
 Group:
 - capabilities:
   - groupsAcl:

From 0efc60426365877476cb4222919c97413cb879bf Mon Sep 17 00:00:00 2001
From: Anders Albert <60234212+doctrino@users.noreply.github.com>
Date: Fri, 15 Dec 2023 12:57:34 +0100
Subject: [PATCH 73/90] Fix cognite_modules Warnings (#220)

* fix: added prefix
* refactor: Added OID example dataset
* fix: loading of datasets
* refactor: Fix warnings
* refactor: added missing datasets
* refactor: a few more fixes
* refactor: Added missing dataset
* refactor: update test data
* refactor: update config file
* build: changelog
* refactor: review feedback on naming
* tests: force regen
* refactor: switch dataset infield
* refactor: remove dataSet infield transformations
* refactor: Transformation specific warning
* tests: updated test data
* Update CHANGELOG.templates.md
* Align data sets with cdf_oid_example_data
* Add back transformations data set
* Fix tests

---------

Co-authored-by: Greger Wedel
---
 CHANGELOG.cdf-tk.md | 1 +
 CHANGELOG.templates.md | 3 +
 cognite_toolkit/cdf_tk/load.py | 6 +-
 cognite_toolkit/cdf_tk/templates.py | 2 +-
 cognite_toolkit/cdf_tk/utils.py | 8 +-
 .../data_sets/data_sets.yaml | 3 +
 .../default.config.yaml | 2 +-
 ...rkmate_apm_simple_load_asset2children.yaml | 1 +
 ...t_oid_workmate_apm_simple_load_assets.yaml | 1 +
 ...d_pi_apm_simple_load_timeseries2assets.sql | 2 +-
 ..._pi_apm_simple_load_timeseries2assets.yaml | 1 +
 ...id_workmate_apm_simple_load_workitems.yaml | 1 +
 ...mate_apm_simple_load_workitems2assets.yaml | 1 +
 ..._apm_simple_load_workitems2workorders.yaml | 1 +
 ...d_workmate_apm_simple_load_workorders.yaml | 1 +
 ...ate_apm_simple_load_workorders2assets.yaml | 1 +
 .../data_sets/data_sets.yaml | 3 +
 .../cdf_oid_example_data/default.config.yaml | 2 +-
 ...tr_asset_oid_workmate_asset_hierarchy.yaml | 1 +
 .../data_sets/data_sets.yaml | 2 +-
 ...asset_hierarchy-load-collections_pump.yaml | 4 +-
 .../data_sets/data_sets.yaml | 3 +
 .../default.config.yaml | 5 +-
 ...sset_hierarchy_cdf_asset_source_model.yaml | 4 +-
 .../data_sets/data_sets.yaml | 3 +
 .../default.config.yaml | 7 +-
 ...del-populate-lift_station_pumps_edges.yaml | 4 +-
 .../pump_model-populate-pump_container.yaml | 4 +-
 .../data_sets/location_source_data_set.yaml | 3 +
 ...eld_sync_assets_from_hierarchy_to_apm.yaml | 1 +
 ...eld_sync_workorders_to_apm_activities.yaml | 1 +
 cognite_toolkit/config.yaml | 14 +-
 .../cdf_apm_simple_data_model.yaml | 188 ++++++++++--------
 .../cdf_asset_source_model.yaml | 63 +++---
 .../cdf_data_pipeline_asset_valhall.yaml | 1 +
 .../cdf_infield_location.yaml | 9 +-
 .../cdf_oid_example_data.yaml | 4 +
 .../example_pump_asset_hierarchy.yaml | 11 +-
 .../example_pump_data_model.yaml | 82 ++++----
 .../cdf_apm_simple_data_model.yaml | 16 +-
 .../cdf_asset_source_model.yaml | 12 +-
 .../example_pump_asset_hierarchy.yaml | 2 +-
 .../example_pump_data_model.yaml | 16 +-
 43 files changed, 288 insertions(+), 212 deletions(-)
 create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_sets/data_sets.yaml
 create mode 100644 cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_sets/data_sets.yaml
 create mode 100644 cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_sets/data_sets.yaml

diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md
index 950888f24..9ae13c2c4 100644
--- a/CHANGELOG.cdf-tk.md
+++ b/CHANGELOG.cdf-tk.md
@@ -46,6 +46,7 @@ Changes are grouped as follows:
   This is now fixed by skipping dataset validation when running with
`--dry-run`. - When having a `auth` group with mixed capabilities of all scoped and resource scoped, the all scoped capabilities were not removed when running `cdf-tk deploy`. This is now fixed. +- Loading `Transformation` did not support setting `dataSetExternalId` in the yaml config file. This is now fixed. ## [0.1.0a3] - 2023-12-01 diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index 396a03c34..8979c74e5 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -23,6 +23,8 @@ Changes are grouped as follows: - The module `my_example_module` has been added to the `custom_modules` folder. - Added globally defined schedule variables that can be used across all modules. - A complete example of an Asset data pipeline in `examples/cdf_asset_data_pipeline/` shows how to configure an Extractor, monitor the status of the Extraction Pipeline, and load the data into the asset hierarchy using Transformations. +- DataSet to all example modules: `cdf_apm_simple_data_model`, `cdf_asset_source_model`, `cdf_oid_example_data`, + `example_pump_data_model`, `example_pump_asset_hierarchy`. ### Changed @@ -43,6 +45,7 @@ Changes are grouped as follows: ### Fixed - Removed transformation identity provider variables from modules and reused the global cicd_ prefixed ones. +- Ensure all transformations in `cognite_modules` are prefixed with `tr_` and all spaces are prefixed with `sp_`. ## [0.1.0a3] - 2023-11-29 diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 2c5588247..0e6f963e0 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -709,6 +709,10 @@ def load_resource(self, filepath: Path, dry_run: bool) -> Transformation: # The `authentication` key is custom for this template: source_oidc_credentials = raw.get("authentication", {}).get("read") or raw.get("authentication") or {} destination_oidc_credentials = raw.get("authentication", {}).get("write") or raw.get("authentication") or {} + if raw.get("dataSetExternalId") is not None: + ds_external_id = raw.pop("dataSetExternalId") + raw["dataSetId"] = self.ToolGlobals.verify_dataset(ds_external_id) if not dry_run else -1 + transformation = Transformation.load(raw) transformation.source_oidc_credentials = source_oidc_credentials and OidcCredentials.load( source_oidc_credentials @@ -726,8 +730,6 @@ def load_resource(self, filepath: Path, dry_run: bool) -> Transformation: f"Could not find sql file belonging to transformation {filepath.name}. Please run build again." 
) transformation.query = sql_file.read_text() - if isinstance(transformation.data_set_id, str): - transformation.data_set_id = self.ToolGlobals.verify_dataset(transformation.data_set_id) return transformation def delete(self, ids: Sequence[str], drop_data: bool) -> int: diff --git a/cognite_toolkit/cdf_tk/templates.py b/cognite_toolkit/cdf_tk/templates.py index 75958573a..36aca4f8e 100644 --- a/cognite_toolkit/cdf_tk/templates.py +++ b/cognite_toolkit/cdf_tk/templates.py @@ -819,7 +819,7 @@ def validate(content: str, destination: Path, source_path: Path) -> None: if load_warnings: print(f" [bold yellow]WARNING:[/] Found potential snake_case issues: {load_warnings!s}") - data_set_warnings = validate_data_set_is_set(parsed, loader.resource_cls, destination) + data_set_warnings = validate_data_set_is_set(parsed, loader.resource_cls, source_path) if data_set_warnings: print(f" [bold yellow]WARNING:[/] Found missing data_sets: {data_set_warnings!s}") diff --git a/cognite_toolkit/cdf_tk/utils.py b/cognite_toolkit/cdf_tk/utils.py index 04e31e909..e69f2cc30 100644 --- a/cognite_toolkit/cdf_tk/utils.py +++ b/cognite_toolkit/cdf_tk/utils.py @@ -461,7 +461,13 @@ def __eq__(self, other: DataSetMissingWarning) -> bool: return (self.id_name, self.id_value, self.filepath) == (other.id_name, other.id_value, other.filepath) def __str__(self): - return f"{type(self).__name__}: It is recommended that you set dataSetExternalId for {self.resource_name}. This is missing in {self.filepath.name}. Did you forget to add it?" + # Avoid circular import + from cognite_toolkit.cdf_tk.load import TransformationLoader + + if self.filepath.parent.name == TransformationLoader.folder_name: + return f"{type(self).__name__}: It is recommended to use a data set if source or destination can be scoped with a data set. If not, ignore this warning." + else: + return f"{type(self).__name__}: It is recommended that you set dataSetExternalId for {self.resource_name}. This is missing in {self.filepath.name}. Did you forget to add it?" T_Warning = TypeVar("T_Warning", bound=LoadWarning) diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_sets/data_sets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_sets/data_sets.yaml new file mode 100644 index 000000000..279014171 --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/data_sets/data_sets.yaml @@ -0,0 +1,3 @@ +- externalId: ds_transformations_{{default_location}} + name: transformations:{{default_location}} + description: This dataset contains transformations data for the {{default_location}} location. 
\ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml index 879eeea59..2654d19d4 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/default.config.yaml @@ -6,7 +6,7 @@ source_asset: workmate source_workorder: workmate source_timeseries: pi datamodel: apm_simple -space: apm_simple +space: sp_apm_simple datamodel_version: '1' view_Asset_version: '1' view_WorkOrder_version: '1' diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml index 8015e18c2..d66bc2d90 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml @@ -12,6 +12,7 @@ destination: ignoreNullFields: true shared: true action: upsert +dataSetExternalId: ds_transformations_{{default_location}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml index e4c61b476..bda879069 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml @@ -11,6 +11,7 @@ destination: ignoreNullFields: true shared: true conflictMode: upsert +dataSetExternalId: ds_transformations_{{default_location}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.sql b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.sql index c52f1c64e..3ccb22437 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.sql +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.sql @@ -2,4 +2,4 @@ select cast(`asset` as STRING) as externalId, array(timeseries) as metrics from - `files_{{default_location}}_{{source_files}}`.`timeseries2assets`; + `files_{{default_location}}_{{source_timeseries}}`.`timeseries2assets`; diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml 
b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml index 659d984b2..a28450a1c 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml @@ -11,6 +11,7 @@ destination: ignoreNullFields: true shared: true action: upsert +dataSetExternalId: ds_transformations_{{default_location}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml index 6354fa90f..1ed61172e 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml @@ -11,6 +11,7 @@ destination: ignoreNullFields: true shared: true action: upsert +dataSetExternalId: ds_transformations_{{default_location}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml index 52c9aa386..531753d41 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml @@ -12,6 +12,7 @@ destination: ignoreNullFields: true shared: true action: upsert +dataSetExternalId: ds_transformations_{{default_location}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml index b68e562f5..83957f19d 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml @@ -12,6 +12,7 @@ destination: ignoreNullFields: true shared: true action: upsert +dataSetExternalId: ds_transformations_{{default_location}} # Specify credentials separately like this: # You can also use different 
credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml index b3cf23827..9bdd852f3 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml @@ -11,6 +11,7 @@ destination: ignoreNullFields: true shared: true action: upsert +dataSetExternalId: ds_transformations_{{default_location}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml index 32764fd2f..6c7bda754 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml @@ -12,6 +12,7 @@ destination: ignoreNullFields: true shared: true action: upsert +dataSetExternalId: ds_transformations_{{default_location}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/data_sets/data_sets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/data_sets/data_sets.yaml index 18f0184a3..3302f958e 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/data_sets/data_sets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/data_sets/data_sets.yaml @@ -13,3 +13,6 @@ - externalId: ds_relationships_{{default_location}} name: relationships:{{default_location}} description: This dataset contains relationships data for the {{default_location}} location. +- externalId: ds_transformations_{{default_location}} + name: transformations:{{default_location}} + description: This dataset contains transformations data for the {{default_location}} location. 
\ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/default.config.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/default.config.yaml index b2ac53ecd..fe1139623 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/default.config.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/default.config.yaml @@ -14,4 +14,4 @@ default_location: oid source_asset: workmate source_workorder: workmate source_files: fileshare -source_timeseries: pi \ No newline at end of file +source_timeseries: pi diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml index a26d32e70..145ea9600 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml @@ -5,6 +5,7 @@ destination: ignoreNullFields: true shared: true conflictMode: upsert +dataSetExternalId: ds_transformations_{{default_location}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/data_sets/data_sets.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/data_sets/data_sets.yaml index 6a0e73a8a..138d5e9d8 100644 --- a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/data_sets/data_sets.yaml +++ b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/data_sets/data_sets.yaml @@ -1,3 +1,3 @@ - externalId: {{data_set}} name: Lift Pump Stations - description: This dataset contains the lift pump stations. + description: This dataset contains the lift pump stations. 
\ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml index 7d0b5fc45..2b787d7c7 100644 --- a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml +++ b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml @@ -1,8 +1,8 @@ -externalId: pump_asset_hierarchy-load-collections_pump +externalId: tr_pump_asset_hierarchy-load-collections_pump name: pump_asset_hierarchy-load-collections_pump destination: type: asset_hierarchy - +dataSetExternalId: {{data_set}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_sets/data_sets.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_sets/data_sets.yaml new file mode 100644 index 000000000..bce96bfdc --- /dev/null +++ b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/data_sets/data_sets.yaml @@ -0,0 +1,3 @@ +- externalId: {{data_set}} + name: Example Asset Source Data + description: This dataset contains the example asset source data. diff --git a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/default.config.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/default.config.yaml index 005a7e719..262c9d356 100644 --- a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/default.config.yaml +++ b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/default.config.yaml @@ -1,6 +1,7 @@ # Only valid for this module, loads template variables from environment -model_space: ExtendedSourceDataModels -instance_space: cdfTemplateInstances +model_space: sp_extended_source_data_models +instance_space: sp_cdf_template_instances view_asset_version: '1' data_model_version: '1' root_asset_external_id: 'lift_pump_stations:root' +data_set: ds_example_asset_source_data \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml index 0c38c60af..adb807857 100644 --- a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml +++ b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml @@ -1,4 +1,4 @@ -externalId: sync-asset_hierarchy_cdf_asset_source_model +externalId: tr_sync-asset_hierarchy_cdf_asset_source_model name: sync-asset_hierarchy_cdf_asset_source_model destination: dataModel: @@ -8,7 +8,7 @@ destination: destinationType: Asset instanceSpace: {{instance_space}} type: instances - +dataSetExternalId: {{data_set}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git 
a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_sets/data_sets.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_sets/data_sets.yaml new file mode 100644 index 000000000..fb1eb645a --- /dev/null +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_sets/data_sets.yaml @@ -0,0 +1,3 @@ +- externalId: {{data_set}} + name: Example Pump Data + description: This dataset contains the example pump data. diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/default.config.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/default.config.yaml index 0690697dd..e69ff1c38 100644 --- a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/default.config.yaml +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/default.config.yaml @@ -1,9 +1,10 @@ # Only valid for this module, loads template variables from environment -model_space: pumpModelSpace -instance_space: pumpInstanceSpace -source_model_space: ExtendedSourceDataModels +model_space: sp_pump_model_space +instance_space: sp_pump_instance_space +source_model_space: sp_extended_source_data_models source_model: ExtendedSourceData view_Pump_version: "1" view_LiftStation_version: "1" data_model_version: "1" data_model: "PumpLiftStations" +data_set: ds_example_pump_data diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml index b1f0a2cfd..870b8df46 100644 --- a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml @@ -1,4 +1,4 @@ -externalId: pump_model-populate-lift_station_pumps_edges +externalId: tr_pump_model-populate-lift_station_pumps_edges name: pump_model-populate-lift_station_pumps_edges destination: dataModel: @@ -9,7 +9,7 @@ destination: destinationRelationshipFromType: pumps instanceSpace: {{instance_space}} type: instances - +dataSetExternalId: {{data_set}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy authentication: diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml index 0d3987ac7..a2b598789 100644 --- a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml @@ -1,4 +1,4 @@ -externalId: pump_model-populate-pump_container +externalId: tr_pump_model-populate-pump_container name: pump_model-populate-pump_container destination: dataModel: @@ -8,7 +8,7 @@ destination: destinationType: Pump instanceSpace: {{instance_space}} type: instances - +dataSetExternalId: {{data_set}} # Specify credentials separately like this: # You can also use different credentials for the running transformations than the ones you use to deploy 
authentication: diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml index d7d310381..bfe76ffa1 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/data_sets/location_source_data_set.yaml @@ -15,3 +15,6 @@ - externalId: ds_relationships_{{default_location}} name: relationships:{{default_location}} description: This dataset contains relationships data for the {{default_location}} location. +- externalId: ds_transformations_{{default_location}} + name: transformations:{{default_location}} + description: This dataset contains transformations data for the {{default_location}} location. \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml index 411e651da..9d34ce166 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml @@ -8,6 +8,7 @@ destination: instanceSpace: 'sp_asset_{{default_location}}_source' type: nodes ignoreNullFields: true +dataSetExternalId: ds_transformations_{{default_location}} shared: true action: upsert # Specify credentials separately like this: diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml index 6ba6fee82..f43aa9494 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml @@ -9,6 +9,7 @@ destination: instanceSpace: 'sp_asset_{{default_location}}_source' type: instances ignoreNullFields: true +dataSetExternalId: ds_transformations_{{default_location}} shared: true action: upsert # Specify credentials separately like this: diff --git a/cognite_toolkit/config.yaml b/cognite_toolkit/config.yaml index 10113c844..82f48602f 100644 --- a/cognite_toolkit/config.yaml +++ b/cognite_toolkit/config.yaml @@ -39,7 +39,7 @@ cognite_modules: source_asset: workmate source_timeseries: pi source_workorder: workmate - space: apm_simple + space: sp_apm_simple view_Asset_version: '1' view_WorkItem_version: '1' view_WorkOrder_version: '1' @@ -101,8 +101,9 @@ cognite_modules: # Only valid for this module, loads template variables from environment cdf_asset_source_model: data_model_version: '1' - instance_space: cdfTemplateInstances - model_space: ExtendedSourceDataModels + data_set: ds_example_asset_source_data + instance_space: sp_cdf_template_instances + model_space: sp_extended_source_data_models root_asset_external_id: lift_pump_stations:root 
view_asset_version: '1' @@ -110,10 +111,11 @@ cognite_modules: example_pump_data_model: data_model: PumpLiftStations data_model_version: '1' - instance_space: pumpInstanceSpace - model_space: pumpModelSpace + data_set: ds_example_pump_data + instance_space: sp_pump_instance_space + model_space: sp_pump_model_space source_model: ExtendedSourceData - source_model_space: ExtendedSourceDataModels + source_model_space: sp_extended_source_data_models view_LiftStation_version: '1' view_Pump_version: '1' diff --git a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml index 80291771b..e1c8d6a9a 100644 --- a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml @@ -88,7 +88,7 @@ Container: type: list: false type: timestamp - space: apm_simple + space: sp_apm_simple usedFor: node - externalId: WorkItem name: WorkItem @@ -161,7 +161,7 @@ Container: nullable: true type: type: direct - space: apm_simple + space: sp_apm_simple usedFor: node - externalId: WorkOrder name: WorkOrder @@ -306,39 +306,44 @@ Container: collation: ucs_basic list: false type: text - space: apm_simple + space: sp_apm_simple usedFor: node DataModel: - externalId: apm_simple name: apm_simple - space: apm_simple + space: sp_apm_simple version: '1' views: - externalId: Asset - space: apm_simple + space: sp_apm_simple type: view version: '1' - externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: view version: '1' - externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: view version: '1' +DataSet: +- description: This dataset contains transformations data for the oid location. + externalId: ds_transformations_oid + name: transformations:oid Space: - description: Space for APM simple data model - name: apm_simple - space: apm_simple + name: sp_apm_simple + space: sp_apm_simple Transformation: -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: children destinationType: Asset externalId: apm_simple - space: apm_simple + space: sp_apm_simple version: '1' - instanceSpace: apm_simple + instanceSpace: sp_apm_simple type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -367,14 +372,15 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} - conflictMode: upsert + dataSetId: 42 destination: dataModel: destinationRelationshipFromType: null destinationType: Asset externalId: apm_simple - space: apm_simple + space: sp_apm_simple version: '1' - instanceSpace: apm_simple + instanceSpace: sp_apm_simple type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -406,14 +412,15 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: null destinationType: Asset externalId: apm_simple - space: apm_simple + space: sp_apm_simple version: '1' - instanceSpace: apm_simple + instanceSpace: sp_apm_simple type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -428,7 +435,7 @@ Transformation: name: timeseries:oid:pi:apm_simple:load_timeseries2assets ownerIsCurrentUser: true query: "select\n cast(`asset` as STRING) as externalId,\n array(timeseries) as\ - \ metrics\nfrom\n `files_oid_{{source_files}}`.`timeseries2assets`;\n" + \ metrics\nfrom\n `files_oid_pi`.`timeseries2assets`;\n" schedule: externalId: 
tr_timeseries_oid_pi_apm_simple_load_timeseries2assets interval: 7 * * * * @@ -440,14 +447,15 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: null destinationType: WorkItem externalId: apm_simple - space: apm_simple + space: sp_apm_simple version: '1' - instanceSpace: apm_simple + instanceSpace: sp_apm_simple type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -478,14 +486,15 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: linkedAssets destinationType: WorkItem externalId: apm_simple - space: apm_simple + space: sp_apm_simple version: '1' - instanceSpace: apm_simple + instanceSpace: sp_apm_simple type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -519,14 +528,15 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: workItems destinationType: WorkOrder externalId: apm_simple - space: apm_simple + space: sp_apm_simple version: '1' - instanceSpace: apm_simple + instanceSpace: sp_apm_simple type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -560,14 +570,15 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: null destinationType: WorkOrder externalId: apm_simple - space: apm_simple + space: sp_apm_simple version: '1' - instanceSpace: apm_simple + instanceSpace: sp_apm_simple type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -605,14 +616,15 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: linkedAssets destinationType: WorkOrder externalId: apm_simple - space: apm_simple + space: sp_apm_simple version: '1' - instanceSpace: apm_simple + instanceSpace: sp_apm_simple type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -672,14 +684,14 @@ View: areaId: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: areaId name: areaId categoryId: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: categoryId name: categoryId @@ -689,83 +701,83 @@ View: name: children source: externalId: Asset - space: apm_simple + space: sp_apm_simple type: view version: '1' type: externalId: Asset.children - space: apm_simple + space: sp_apm_simple createdDate: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: createdDate name: createdDate description: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: description name: description documents: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: documents name: documents isActive: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: isActive name: isActive isCriticalLine: 
container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: isCriticalLine name: isCriticalLine metrics: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: metrics name: metrics parent: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: parent name: parent sourceDb: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: sourceDb name: sourceDb tag: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: tag name: tag updatedDate: container: externalId: Asset - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: updatedDate name: updatedDate - space: apm_simple + space: sp_apm_simple version: '1' - externalId: WorkItem name: WorkItem @@ -773,35 +785,35 @@ View: criticality: container: externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: criticality name: criticality description: container: externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: description name: description isCompleted: container: externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: isCompleted name: isCompleted itemInfo: container: externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: itemInfo name: itemInfo itemName: container: externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: itemName name: itemName @@ -811,41 +823,41 @@ View: name: linkedAssets source: externalId: Asset - space: apm_simple + space: sp_apm_simple type: view version: '1' type: externalId: WorkItem.linkedAssets - space: apm_simple + space: sp_apm_simple method: container: externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: method name: method title: container: externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: title name: title toBeDone: container: externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: toBeDone name: toBeDone workOrder: container: externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: workOrder name: workOrder - space: apm_simple + space: sp_apm_simple version: '1' - externalId: WorkOrder name: WorkOrder @@ -853,70 +865,70 @@ View: actualHours: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: actualHours name: actualHours createdDate: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: createdDate name: createdDate description: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: description name: description dueDate: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: dueDate name: dueDate durationHours: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: 
durationHours name: durationHours endTime: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: endTime name: endTime isActive: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: isActive name: isActive isCancelled: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: isCancelled name: isCancelled isCompleted: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: isCompleted name: isCompleted isSafetyCritical: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: isSafetyCritical name: isSafetyCritical @@ -926,58 +938,58 @@ View: name: linkedAssets source: externalId: Asset - space: apm_simple + space: sp_apm_simple type: view version: 8069f1498c7f9a type: externalId: WorkOrder.linkedAssets - space: apm_simple + space: sp_apm_simple percentageProgress: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: percentageProgress name: percentageProgress plannedStart: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: plannedStart name: plannedStart priorityDescription: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: priorityDescription name: priorityDescription programNumber: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: programNumber name: programNumber startTime: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: startTime name: startTime status: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: status name: status title: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: title name: title @@ -987,32 +999,32 @@ View: name: workItems source: externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: view version: 94d2b7121128a2 type: externalId: WorkOrder.workItems - space: apm_simple + space: sp_apm_simple workOrderNumber: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: workOrderNumber name: workOrderNumber workPackageNumber: container: externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container containerPropertyIdentifier: workPackageNumber name: workPackageNumber - space: apm_simple + space: sp_apm_simple version: '1' deleted: DataModel: - externalId: apm_simple - space: apm_simple + space: sp_apm_simple type: datamodel version: '1' Transformation: @@ -1026,14 +1038,14 @@ deleted: - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets View: - externalId: Asset - space: apm_simple + space: sp_apm_simple type: view version: '1' - externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: view version: '1' - externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: view version: '1' diff --git a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml 
index 0fb3042ec..222c72e87 100644 --- a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml @@ -10,34 +10,39 @@ Container: type: list: false type: json - space: ExtendedSourceDataModels + space: sp_extended_source_data_models usedFor: node DataModel: - externalId: ExtendedSourceData name: ExtendedSourceData - space: ExtendedSourceDataModels + space: sp_extended_source_data_models version: '1' views: - externalId: Asset - space: ExtendedSourceDataModels + space: sp_extended_source_data_models type: view version: '1' +DataSet: +- description: This dataset contains the example asset source data. + externalId: ds_example_asset_source_data + name: Example Asset Source Data Space: -- description: Space for the model in the ExtendedSourceData data model - name: ExtendedSourceDataModels - space: ExtendedSourceDataModels - description: Space for the instances in the ExtendedSourceData data model - name: cdfTemplateInstances - space: cdfTemplateInstances + name: sp_cdf_template_instances + space: sp_cdf_template_instances +- description: Space for the model in the ExtendedSourceData data model + name: sp_extended_source_data_models + space: sp_extended_source_data_models Transformation: -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: null destinationType: Asset externalId: ExtendedSourceData - space: ExtendedSourceDataModels + space: sp_extended_source_data_models version: '1' - instanceSpace: cdfTemplateInstances + instanceSpace: sp_cdf_template_instances type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -46,7 +51,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} - externalId: sync-asset_hierarchy_cdf_asset_source_model + externalId: tr_sync-asset_hierarchy_cdf_asset_source_model ignoreNullFields: false isPublic: true name: sync-asset_hierarchy_cdf_asset_source_model @@ -54,29 +59,29 @@ Transformation: query: "-- Root Asset\n-- The asset must be set up in hierarchical order as the\ \ container for the parent asset requires the\n-- parent asset to be created first.\n\ \nselect\n cast(`externalId` as STRING) as externalId,\n null as parent,\n \ - \ node_reference('cdfTemplateInstances', 'lift_pump_stations:root') as root,\n\ + \ node_reference('sp_cdf_template_instances', 'lift_pump_stations:root') as root,\n\ \ cast(`name` as STRING) as title,\n cast(`source` as STRING) as source,\n \ \ cast(`description` as STRING) as description,\n cast(`labels` as ARRAY < STRING\ \ >) as labels,\n to_json(`metadata`) as metadata\nfrom\n cdf_assetSubtree(\"\ lift_pump_stations:root\")\nwhere\n-- The root asset is created with a null parentExternalId.\n\ \ isnull(`parentExternalId`)\n\nUNION ALL\n-- Pump Stations\nselect\n cast(`externalId`\ - \ as STRING) as externalId,\n node_reference('cdfTemplateInstances', `parentExternalId`)\ - \ as parent,\n node_reference('cdfTemplateInstances', 'lift_pump_stations:root')\ + \ as STRING) as externalId,\n node_reference('sp_cdf_template_instances', `parentExternalId`)\ + \ as parent,\n node_reference('sp_cdf_template_instances', 'lift_pump_stations:root')\ \ as root,\n cast(`name` as STRING) as title,\n cast(`source` as STRING) as\ \ source,\n cast(`description` as STRING) as description,\n cast(`labels` as\ \ ARRAY < STRING >) as labels,\n to_json(`metadata`) as metadata\nfrom\n cdf_assetSubtree('lift_pump_stations:root')\n\ where\n-- This is used to select the Lift 
Stations.\n isnotnull(`externalId`)\ \ and isnotnull(`parentExternalId`) and not startswith(name, 'Pump')\n\nUNION\ \ ALL\n-- Pumps\nselect\n concat('pump:', cast(`externalId` as STRING)) as externalId,\n\ - \ node_reference('cdfTemplateInstances', `parentExternalId`) as parent,\n node_reference('cdfTemplateInstances',\ - \ 'lift_pump_stations:root') as root,\n cast(`name` as STRING) as title,\n cast(`source`\ - \ as STRING) as source,\n cast(`description` as STRING) as description,\n cast(`labels`\ - \ as ARRAY < STRING >) as labels,\n to_json(`metadata`) as metadata\nfrom\n \ - \ cdf_assetSubtree('lift_pump_stations:root')\nwhere\n-- This is used to select\ - \ the Pumps.\n isnotnull(`externalId`) and isnotnull(`parentExternalId`) and startswith(name,\ - \ 'Pump');\n" + \ node_reference('sp_cdf_template_instances', `parentExternalId`) as parent,\n\ + \ node_reference('sp_cdf_template_instances', 'lift_pump_stations:root') as root,\n\ + \ cast(`name` as STRING) as title,\n cast(`source` as STRING) as source,\n \ + \ cast(`description` as STRING) as description,\n cast(`labels` as ARRAY < STRING\ + \ >) as labels,\n to_json(`metadata`) as metadata\nfrom\n cdf_assetSubtree('lift_pump_stations:root')\n\ + where\n-- This is used to select the Pumps.\n isnotnull(`externalId`) and isnotnull(`parentExternalId`)\ + \ and startswith(name, 'Pump');\n" schedule: - externalId: sync-asset_hierarchy_cdf_asset_source_model + externalId: tr_sync-asset_hierarchy_cdf_asset_source_model interval: 7 * * * * isPaused: true sourceOidcCredentials: @@ -87,7 +92,7 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: -- externalId: sync-asset_hierarchy_cdf_asset_source_model +- externalId: tr_sync-asset_hierarchy_cdf_asset_source_model interval: 7 * * * * isPaused: true View: @@ -102,22 +107,22 @@ View: metadata: container: externalId: Asset - space: ExtendedSourceDataModels + space: sp_extended_source_data_models type: container containerPropertyIdentifier: metadata name: metadata - space: ExtendedSourceDataModels + space: sp_extended_source_data_models version: '1' deleted: DataModel: - externalId: ExtendedSourceData - space: ExtendedSourceDataModels + space: sp_extended_source_data_models type: datamodel version: '1' Transformation: - - externalId: sync-asset_hierarchy_cdf_asset_source_model + - externalId: tr_sync-asset_hierarchy_cdf_asset_source_model View: - externalId: Asset - space: ExtendedSourceDataModels + space: sp_extended_source_data_models type: view version: '1' diff --git a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml index 11de4f075..f93499427 100644 --- a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml +++ b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml @@ -119,6 +119,7 @@ Group: sourceId: Transformation: - conflictMode: upsert + dataSetId: 42 destination: type: asset_hierarchy destinationOidcCredentials: diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml index c60f6942f..48be16c11 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml +++ b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml @@ -17,6 +17,9 @@ DataSet: - description: This dataset contains timeseries for the oid location. 
externalId: ds_timeseries_oid name: timeseries:oid +- description: This dataset contains transformations data for the oid location. + externalId: ds_transformations_oid + name: transformations:oid Group: - capabilities: - groupsAcl: @@ -434,7 +437,8 @@ Space: name: sp:infield:oid:source space: sp_asset_oid_source Transformation: -- destination: +- dataSetId: 42 + destination: instanceSpace: sp_asset_oid_source type: nodes view: @@ -479,7 +483,8 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: null destinationType: APM_Activity diff --git a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml index a394d3b10..810754680 100644 --- a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml +++ b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml @@ -14,6 +14,9 @@ DataSet: - description: This dataset contains timeseries for the oid location. externalId: ds_timeseries_oid name: timeseries:oid +- description: This dataset contains transformations data for the oid location. + externalId: ds_transformations_oid + name: transformations:oid FileMetadata: - args: [] kwargs: @@ -656,6 +659,7 @@ TimeSeries: unit: degC Transformation: - conflictMode: upsert + dataSetId: 42 destination: type: asset_hierarchy destinationOidcCredentials: diff --git a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml index 133bfb013..20eb083a0 100644 --- a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml @@ -43,7 +43,8 @@ Row: table_name: collections_pump name: pump_assets_collections_pump_True Transformation: -- destination: +- dataSetId: 42 + destination: type: asset_hierarchy destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -52,7 +53,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} - externalId: pump_asset_hierarchy-load-collections_pump + externalId: tr_pump_asset_hierarchy-load-collections_pump ignoreNullFields: false isPublic: true name: pump_asset_hierarchy-load-collections_pump @@ -76,7 +77,7 @@ Transformation: \ Enabled,\n DesignPointHeadFT,\n LowHeadFT,\n FacilityID,\n InstallDate,\n\ \ LifeCycleStatus,\n LocationDescription\n ) as metadata\nfrom `pump_assets`.`collections_pump`\n" schedule: - externalId: pump_asset_hierarchy-load-collections_pump + externalId: tr_pump_asset_hierarchy-load-collections_pump interval: 7 * * * * isPaused: true sourceOidcCredentials: @@ -87,7 +88,7 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: -- externalId: pump_asset_hierarchy-load-collections_pump +- externalId: tr_pump_asset_hierarchy-load-collections_pump interval: 7 * * * * isPaused: true deleted: @@ -96,4 +97,4 @@ deleted: name: - collections_pump Transformation: - - externalId: pump_asset_hierarchy-load-collections_pump + - externalId: tr_pump_asset_hierarchy-load-collections_pump diff --git a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml index e3ff12c36..39fee9783 100644 --- a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml +++ 
b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml @@ -41,38 +41,43 @@ Container: type: list: false type: float64 - space: pumpModelSpace + space: sp_pump_model_space usedFor: node DataModel: - externalId: PumpLiftStations name: PumpLiftStations - space: pumpModelSpace + space: sp_pump_model_space version: '1' views: - externalId: Pump - space: pumpModelSpace + space: sp_pump_model_space type: view version: '1' - externalId: LiftStation - space: pumpModelSpace + space: sp_pump_model_space type: view version: '1' +DataSet: +- description: This dataset contains the example pump data. + externalId: ds_example_pump_data + name: Example Pump Data Space: - description: Space for the instances. - name: pumpInstanceSpace - space: pumpInstanceSpace + name: sp_pump_instance_space + space: sp_pump_instance_space - description: Space for the Pump Model. - name: pumpModelSpace - space: pumpModelSpace + name: sp_pump_model_space + space: sp_pump_model_space Transformation: -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: pumps destinationType: LiftStation externalId: PumpLiftStations - space: pumpModelSpace + space: sp_pump_model_space version: '1' - instanceSpace: pumpInstanceSpace + instanceSpace: sp_pump_instance_space type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -81,17 +86,17 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} - externalId: pump_model-populate-lift_station_pumps_edges + externalId: tr_pump_model-populate-lift_station_pumps_edges ignoreNullFields: false isPublic: true name: pump_model-populate-lift_station_pumps_edges ownerIsCurrentUser: true query: "select\n concat(cast(`parent`.externalId as STRING), ':', cast(`externalId`\ - \ as STRING)) as externalId,\n `parent` as startNode,\n node_reference('pumpInstanceSpace',\ - \ cast(`externalId` as STRING)) as endNode\nfrom\n cdf_data_models(\"ExtendedSourceDataModels\"\ + \ as STRING)) as externalId,\n `parent` as startNode,\n node_reference('sp_pump_instance_space',\ + \ cast(`externalId` as STRING)) as endNode\nfrom\n cdf_data_models(\"sp_extended_source_data_models\"\ , \"ExtendedSourceData\", \"1\", \"Asset\")\nwhere\n startswith(title, 'Pump')\n" schedule: - externalId: pump_model-populate-lift_station_pumps_edges + externalId: tr_pump_model-populate-lift_station_pumps_edges interval: 7 * * * * isPaused: true sourceOidcCredentials: @@ -101,14 +106,15 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} -- destination: +- dataSetId: 42 + destination: dataModel: destinationRelationshipFromType: null destinationType: Pump externalId: PumpLiftStations - space: pumpModelSpace + space: sp_pump_model_space version: '1' - instanceSpace: pumpInstanceSpace + instanceSpace: sp_pump_instance_space type: instances destinationOidcCredentials: audience: ${IDP_AUDIENCE} @@ -117,7 +123,7 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} - externalId: pump_model-populate-pump_container + externalId: tr_pump_model-populate-pump_container ignoreNullFields: false isPublic: true name: pump_model-populate-pump_container @@ -127,10 +133,10 @@ Transformation: \ '$.LowHeadFT') as DOUBLE) as LowHeadFT,\n cast(get_json_object(`metadata`,\ \ '$.DesignPointFlowGPM') as DOUBLE) as DesignPointFlowGPM,\n cast(get_json_object(`metadata`,\ \ '$.LowHeadFlowGPM') as DOUBLE) as LowHeadFlowGPM\nfrom\n cdf_data_models(\"\ - 
ExtendedSourceDataModels\", \"ExtendedSourceData\", \"1\", \"Asset\")\nwhere\n\ - \ startswith(title, 'Pump')\n" + sp_extended_source_data_models\", \"ExtendedSourceData\", \"1\", \"Asset\")\n\ + where\n startswith(title, 'Pump')\n" schedule: - externalId: pump_model-populate-pump_container + externalId: tr_pump_model-populate-pump_container interval: 7 * * * * isPaused: true sourceOidcCredentials: @@ -141,10 +147,10 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: -- externalId: pump_model-populate-lift_station_pumps_edges +- externalId: tr_pump_model-populate-lift_station_pumps_edges interval: 7 * * * * isPaused: true -- externalId: pump_model-populate-pump_container +- externalId: tr_pump_model-populate-pump_container interval: 7 * * * * isPaused: true View: @@ -172,13 +178,13 @@ View: name: pumps source: externalId: Pump - space: pumpModelSpace + space: sp_pump_model_space type: view version: '1' type: externalId: LiftStation.pumps - space: pumpModelSpace - space: pumpModelSpace + space: sp_pump_model_space + space: sp_pump_model_space version: '1' - externalId: Pump filter: @@ -201,28 +207,28 @@ View: DesignPointFlowGPM: container: externalId: Pump - space: pumpModelSpace + space: sp_pump_model_space type: container containerPropertyIdentifier: DesignPointFlowGPM name: DesignPointFlowGPM DesignPointHeadFT: container: externalId: Pump - space: pumpModelSpace + space: sp_pump_model_space type: container containerPropertyIdentifier: DesignPointHeadFT name: DesignPointHeadFT LowHeadFT: container: externalId: Pump - space: pumpModelSpace + space: sp_pump_model_space type: container containerPropertyIdentifier: LowHeadFT name: LowHeadFT LowHeadFlowGPM: container: externalId: Pump - space: pumpModelSpace + space: sp_pump_model_space type: container containerPropertyIdentifier: LowHeadFlowGPM name: LowHeadFlowGPM @@ -235,26 +241,26 @@ View: name: liftStation source: externalId: LiftStation - space: pumpModelSpace + space: sp_pump_model_space type: view version: '1' - space: pumpModelSpace + space: sp_pump_model_space version: '1' deleted: DataModel: - externalId: PumpLiftStations - space: pumpModelSpace + space: sp_pump_model_space type: datamodel version: '1' Transformation: - - externalId: pump_model-populate-lift_station_pumps_edges - - externalId: pump_model-populate-pump_container + - externalId: tr_pump_model-populate-lift_station_pumps_edges + - externalId: tr_pump_model-populate-pump_container View: - externalId: LiftStation - space: pumpModelSpace + space: sp_pump_model_space type: view version: '1' - externalId: Pump - space: pumpModelSpace + space: sp_pump_model_space type: view version: '1' diff --git a/tests/test_approval_modules_snapshots_clean/cdf_apm_simple_data_model.yaml b/tests/test_approval_modules_snapshots_clean/cdf_apm_simple_data_model.yaml index 5b2182d34..5367d9dc6 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_apm_simple_data_model.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_apm_simple_data_model.yaml @@ -1,21 +1,21 @@ deleted: Container: - externalId: Asset - space: apm_simple + space: sp_apm_simple type: container - externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: container - externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: container DataModel: - externalId: apm_simple - space: apm_simple + space: sp_apm_simple type: datamodel version: '1' Space: - - apm_simple + - sp_apm_simple Transformation: - externalId: 
tr_asset_oid_workmate_apm_simple_load_asset2children - externalId: tr_asset_oid_workmate_apm_simple_load_assets @@ -27,14 +27,14 @@ deleted: - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets View: - externalId: Asset - space: apm_simple + space: sp_apm_simple type: view version: '1' - externalId: WorkItem - space: apm_simple + space: sp_apm_simple type: view version: '1' - externalId: WorkOrder - space: apm_simple + space: sp_apm_simple type: view version: '1' diff --git a/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml index 233483fe0..c8fecb1ea 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml @@ -1,20 +1,20 @@ deleted: Container: - externalId: Asset - space: ExtendedSourceDataModels + space: sp_extended_source_data_models type: container DataModel: - externalId: ExtendedSourceData - space: ExtendedSourceDataModels + space: sp_extended_source_data_models type: datamodel version: '1' Space: - - ExtendedSourceDataModels - - cdfTemplateInstances + - sp_cdf_template_instances + - sp_extended_source_data_models Transformation: - - externalId: sync-asset_hierarchy_cdf_asset_source_model + - externalId: tr_sync-asset_hierarchy_cdf_asset_source_model View: - externalId: Asset - space: ExtendedSourceDataModels + space: sp_extended_source_data_models type: view version: '1' diff --git a/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml b/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml index 469bb8ab1..6a8c3b28b 100644 --- a/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml +++ b/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml @@ -4,4 +4,4 @@ deleted: name: - collections_pump Transformation: - - externalId: pump_asset_hierarchy-load-collections_pump + - externalId: tr_pump_asset_hierarchy-load-collections_pump diff --git a/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml b/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml index cec1332f1..ac94f3fb5 100644 --- a/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml +++ b/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml @@ -1,25 +1,25 @@ deleted: Container: - externalId: Pump - space: pumpModelSpace + space: sp_pump_model_space type: container DataModel: - externalId: PumpLiftStations - space: pumpModelSpace + space: sp_pump_model_space type: datamodel version: '1' Space: - - pumpInstanceSpace - - pumpModelSpace + - sp_pump_instance_space + - sp_pump_model_space Transformation: - - externalId: pump_model-populate-lift_station_pumps_edges - - externalId: pump_model-populate-pump_container + - externalId: tr_pump_model-populate-lift_station_pumps_edges + - externalId: tr_pump_model-populate-pump_container View: - externalId: LiftStation - space: pumpModelSpace + space: sp_pump_model_space type: view version: '1' - externalId: Pump - space: pumpModelSpace + space: sp_pump_model_space type: view version: '1' From 931b3ae16c034003d78a59dd6bb690d7b5b3a99e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 13:08:12 +0100 Subject: [PATCH 74/90] Savepoint tested schedule creation with Valhall --- cognite_toolkit/cdf_tk/load.py | 55 +++++++++++++++++--
...m_simple_load_asset2children.schedule.yaml | 3 + ...rkmate_apm_simple_load_asset2children.yaml | 5 +- ...oid_workmate_asset_hierarchy.schedule.yaml | 3 + ...tr_asset_oid_workmate_asset_hierarchy.yaml | 6 +- 5 files changed, 59 insertions(+), 13 deletions(-) create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 2c5588247..2bf844006 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -42,6 +42,8 @@ TimeSeriesList, Transformation, TransformationList, + TransformationSchedule, + TransformationScheduleList, capabilities, ) from cognite.client.data_classes._base import ( @@ -685,6 +687,9 @@ def load_resource(self, filepath: Path, dry_run: bool) -> TimeSeries | TimeSerie class TransformationLoader(Loader[str, Transformation, TransformationList]): api_name = "transformations" folder_name = "transformations" + filename_pattern = ( + r"^(?:(?!\.schedule).)*$" # Matches all yaml files except those whose stem contains .schedule. + ) resource_cls = Transformation list_cls = TransformationList dependencies = frozenset({DataSetsLoader, RawLoader}) @@ -707,6 +712,10 @@ def get_id(self, item: Transformation) -> str: def load_resource(self, filepath: Path, dry_run: bool) -> Transformation: raw = load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables()) # The `authentication` key is custom for this template: + if raw.get("schedule"): + print("[ERROR] Schedule should not be part of the transformation yaml file") + # with open()... + source_oidc_credentials = raw.get("authentication", {}).get("read") or raw.get("authentication") or {} destination_oidc_credentials = raw.get("authentication", {}).get("write") or raw.get("authentication") or {} transformation = Transformation.load(raw) @@ -748,13 +757,51 @@ def create(self, items: Sequence[Transformation], drop: bool, filepath: Path) -> print(f"[bold red]ERROR:[/] Failed to create resource(s).\n{e}") self.ToolGlobals.failed = True return TransformationList([]) - for t in items if isinstance(items, Sequence) else [items]: - if t.schedule.interval != "": - t.schedule.external_id = t.external_id - self.client.transformations.schedules.create(t.schedule) return created +@final +class TransformationScheduleLoader(Loader[str, TransformationSchedule, TransformationScheduleList]): + api_name = "transformations.schedules" + folder_name = "transformations" + filename_pattern = r"^.*\.schedule$" # Matches all yaml files whose stem ends with .schedule.
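The two filename_pattern regexes above partition the transformations folder into two disjoint sets: the tempered negative lookahead ^(?:(?!\.schedule).)*$ matches only stems containing no .schedule, while ^.*\.schedule$ matches only stems ending in .schedule. A minimal sketch of that split, assuming standard Python re semantics (the stems below are illustrative):

    import re

    transformation_pattern = re.compile(r"^(?:(?!\.schedule).)*$")
    schedule_pattern = re.compile(r"^.*\.schedule$")

    # Illustrative stems, i.e. file names with the .yaml suffix stripped.
    for stem in [
        "tr_asset_oid_workmate_asset_hierarchy",
        "tr_asset_oid_workmate_asset_hierarchy.schedule",
    ]:
        is_transformation = transformation_pattern.match(stem) is not None
        is_schedule = schedule_pattern.match(stem) is not None
        assert is_transformation != is_schedule  # each of these stems lands in exactly one bucket
        print(stem, "->", "transformation" if is_transformation else "schedule")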
+ resource_cls = TransformationSchedule + list_cls = TransformationScheduleList + dependencies = frozenset({TransformationLoader}) + + @classmethod + def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability: + scope = ( + TransformationsAcl.Scope.DataSet([ToolGlobals.data_set_id]) + if ToolGlobals.data_set_id + else TransformationsAcl.Scope.All() + ) + return TransformationsAcl( + [TransformationsAcl.Action.Read, TransformationsAcl.Action.Write], + scope, + ) + + def get_id(self, item: TransformationSchedule) -> str: + return item.external_id + + def load_resource(self, filepath: Path, dry_run: bool) -> TransformationSchedule: + raw = load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables()) + return TransformationSchedule.load(raw) + + def delete(self, ids: Sequence[str], drop_data: bool) -> int: + try: + self.client.transformations.schedules.delete(external_id=ids, ignore_unknown_ids=False) + return len(ids) + except CogniteNotFoundError as e: + print( + f" [bold yellow]WARNING:[/] {len(e.not_found)} out of {len(ids)} transformation schedules do(es) not exist." + ) + return len(ids) - len(e.not_found) + + def create(self, items: Sequence[TransformationSchedule], drop: bool, filepath: Path) -> TransformationScheduleList: + return self.client.transformations.schedules.create(items) + + @final class DatapointsLoader(Loader[list[str], Path, TimeSeriesList]): support_drop = False diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.schedule.yaml new file mode 100644 index 000000000..95bd0a6b3 --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.schedule.yaml @@ -0,0 +1,3 @@ +schedule: + interval: '{{scheduleHourly}}' + isPaused : {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml index 8015e18c2..cb8a69d85 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.yaml @@ -22,7 +22,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused : {{pause_transformations}} \ No newline at end of file + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml new file mode 100644 index 000000000..35a46d095 --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml
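The net effect of the new loader is that each transformation gets a sibling <name>.schedule.yaml, and the dependency on TransformationLoader ensures the transformation is created before its schedule. A rough sketch of the load/create round trip the loader performs, assuming cognite-sdk's TransformationSchedule class and a client configured elsewhere (the file name and YAML contents are illustrative):

    from pathlib import Path

    import yaml
    from cognite.client import CogniteClient
    from cognite.client.data_classes import TransformationSchedule

    client = CogniteClient()  # assumes default/global configuration, e.g. from environment variables

    # Illustrative sibling schedule file for a transformation:
    raw = yaml.safe_load(Path("tr_asset_oid_workmate_asset_hierarchy.schedule.yaml").read_text())
    # raw is expected to look like:
    # {"externalId": "tr_asset_oid_workmate_asset_hierarchy", "interval": "7 * * * *", "isPaused": True}
    schedule = TransformationSchedule.load(raw)  # camelCase keys map to snake_case attributes
    client.transformations.schedules.create(schedule)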
@@ -0,0 +1,3 @@ +externalId: 'tr_asset_{{location_name}}_{{source_name}}_asset_hierarchy' +interval: '{{scheduleHourly}}' +isPaused: {{pause_transformations}} diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml index f2119d09b..19663c3bd 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml @@ -16,8 +16,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} -schedule: - # every hour - interval: '{{scheduleHourly}}' - isPaused: {{pause_transformations}} + audience: {{cicd_audience}} \ No newline at end of file From 27fa941a6764cfd7a2e508cd3f5a81a422aef084 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 13:58:50 +0100 Subject: [PATCH 75/90] Savepoint split templates --- ...id_workmate_apm_simple_load_asset2children.schedule.yaml | 6 +++--- ..._asset_oid_workmate_apm_simple_load_assets.schedule.yaml | 3 +++ .../tr_asset_oid_workmate_apm_simple_load_assets.yaml | 5 +---- ...s_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml | 3 +++ ...timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml | 5 +---- ...der_oid_workmate_apm_simple_load_workitems.schedule.yaml | 3 +++ ...tr_workorder_oid_workmate_apm_simple_load_workitems.yaml | 5 +---- ..._workmate_apm_simple_load_workitems2assets.schedule.yaml | 3 +++ ...order_oid_workmate_apm_simple_load_workitems2assets.yaml | 5 +---- ...kmate_apm_simple_load_workitems2workorders.schedule.yaml | 3 +++ ...r_oid_workmate_apm_simple_load_workitems2workorders.yaml | 5 +---- ...er_oid_workmate_apm_simple_load_workorders.schedule.yaml | 3 +++ ...r_workorder_oid_workmate_apm_simple_load_workorders.yaml | 5 +---- ...workmate_apm_simple_load_workorders2assets.schedule.yaml | 3 +++ ...rder_oid_workmate_apm_simple_load_workorders2assets.yaml | 5 +---- .../tr_asset_oid_workmate_asset_hierarchy.yaml | 2 +- .../tr_asset_oid_workmate_asset_hierarchy.schedule.yaml | 3 +++ .../tr_asset_oid_workmate_asset_hierarchy.yaml | 5 +---- ...pump_asset_hierarchy-load-collections_pump.schedule.yaml | 3 +++ .../pump_asset_hierarchy-load-collections_pump.yaml | 5 +---- ...ync-asset_hierarchy_cdf_asset_source_model.schedule.yaml | 3 +++ .../sync-asset_hierarchy_cdf_asset_source_model.yaml | 5 +---- ...mp_model-populate-lift_station_pumps_edges.schedule.yaml | 3 +++ .../pump_model-populate-lift_station_pumps_edges.yaml | 3 --- .../pump_model-populate-pump_container.schedule.yaml | 3 +++ .../transformations/pump_model-populate-pump_container.yaml | 5 +---- ..._infield_sync_assets_from_hierarchy_to_apm.schedule.yaml | 3 +++ ..._workmate_infield_sync_assets_from_hierarchy_to_apm.yaml | 5 +---- ..._infield_sync_workorders_to_apm_activities.schedule.yaml | 3 +++ ..._workmate_infield_sync_workorders_to_apm_activities.yaml | 3 --- tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml | 3 +++ ...s_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml | 3 +++ ...der_oid_workmate_apm_simple_load_workitems.schedule.yaml | 3 +++ ..._workmate_apm_simple_load_workitems2assets.schedule.yaml | 3 
+++ ...kmate_apm_simple_load_workitems2workorders.schedule.yaml | 3 +++ ...er_oid_workmate_apm_simple_load_workorders.schedule.yaml | 3 +++ ...workmate_apm_simple_load_workorders2assets.schedule.yaml | 3 +++ 37 files changed, 79 insertions(+), 58 deletions(-) create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.schedule.yaml create mode 100644 cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.schedule.yaml create mode 100644 tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml create mode 100644 tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml create mode 100644 tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml create mode 100644 tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml create mode 100644 tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml create mode 100644 tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml create mode 100644 tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.schedule.yaml index 
95bd0a6b3..6ffe404a7 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.schedule.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_asset2children.schedule.yaml @@ -1,3 +1,3 @@ -schedule: - interval: '{{scheduleHourly}}' - isPaused : {{pause_transformations}} \ No newline at end of file +externalId: tr_asset_{{default_location}}_{{source_asset}}_apm_simple_load_asset2children +interval: '{{scheduleHourly}}' +isPaused : {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml new file mode 100644 index 000000000..91aff87fc --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_asset_{{default_location}}_{{source_asset}}_apm_simple_load_assets +interval: '{{scheduleHourly}}' +isPaused : {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml index e4c61b476..a313f02d1 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_asset_oid_workmate_apm_simple_load_assets.yaml @@ -21,7 +21,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused : {{pause_transformations}} \ No newline at end of file + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml new file mode 100644 index 000000000..4580ca67a --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_timeseries_{{default_location}}_{{source_timeseries}}_apm_simple_load_timeseries2assets +interval: '{{scheduleHourly}}' +isPaused : {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml index 659d984b2..5fa7a3240 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml +++ 
b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.yaml @@ -21,7 +21,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused : {{pause_transformations}} \ No newline at end of file + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml new file mode 100644 index 000000000..338f40459 --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_{{default_location}}_{{source_workorder}}_apm_simple_load_workitems +interval: '{{scheduleHourly}}' +isPaused : {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml index 6354fa90f..5158c4dd4 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems.yaml @@ -21,7 +21,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused : {{pause_transformations}} \ No newline at end of file + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml new file mode 100644 index 000000000..6f4161c4f --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_{{default_location}}_{{source_workorder}}_apm_simple_load_workitems2assets +interval: '{{scheduleHourly}}' +isPaused : {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml index 52c9aa386..6d7c156d8 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml +++ 
b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.yaml @@ -22,7 +22,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused : {{pause_transformations}} \ No newline at end of file + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml new file mode 100644 index 000000000..9d4ca24eb --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_{{default_location}}_{{source_workorder}}_apm_simple_load_workitems2workorders +interval: '{{scheduleHourly}}' +isPaused : {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml index b68e562f5..b982a3de7 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.yaml @@ -22,7 +22,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused : {{pause_transformations}} \ No newline at end of file + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml new file mode 100644 index 000000000..7ef1882a5 --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_{{default_location}}_{{source_workorder}}_apm_simple_load_workorders +interval: '{{scheduleHourly}}' +isPaused : {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml index b3cf23827..c01d67f73 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml +++ 
b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders.yaml @@ -21,7 +21,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused : {{pause_transformations}} \ No newline at end of file + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml new file mode 100644 index 000000000..de33f88aa --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_{{default_location}}_{{source_workorder}}_apm_simple_load_workorders2assets +interval: '{{scheduleHourly}}' +isPaused : {{pause_transformations}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml index 32764fd2f..20c3d9ddc 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_apm_simple_data_model/transformations/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.yaml @@ -22,7 +22,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused : {{pause_transformations}} \ No newline at end of file + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml index 19663c3bd..8449e7086 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml @@ -16,4 +16,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} \ No newline at end of file + audience: {{cicd_audience}} diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml new file mode 100644 index 000000000..4397716cb --- /dev/null +++ 
b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_asset_{{default_location}}_{{source_asset}}_asset_hierarchy +interval: '{{scheduleHourly}}' +isPaused: false \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml index a26d32e70..091e9afcc 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml @@ -15,7 +15,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the cicd_audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused: false \ No newline at end of file + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.schedule.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.schedule.yaml new file mode 100644 index 000000000..e7f7d7b27 --- /dev/null +++ b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.schedule.yaml @@ -0,0 +1,3 @@ +externalId: pump_asset_hierarchy-load-collections_pump +interval: '{{scheduleHourly}}' +isPaused: true diff --git a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml index 7d0b5fc45..bf2c09aca 100644 --- a/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml +++ b/cognite_toolkit/cognite_modules/examples/example_pump_asset_hierarchy/transformations/pump_asset_hierarchy-load-collections_pump.yaml @@ -13,7 +13,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused: true + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.schedule.yaml b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.schedule.yaml new file mode 100644 index 000000000..7c9de7953 --- /dev/null +++ b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.schedule.yaml @@ -0,0 +1,3 @@ +externalId: sync-asset_hierarchy_cdf_asset_source_model +interval: '{{scheduleHourly}}' +isPaused: true diff --git a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml 
b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml index 0c38c60af..9ef3d3696 100644 --- a/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml +++ b/cognite_toolkit/cognite_modules/experimental/cdf_asset_source_model/transformations/sync-asset_hierarchy_cdf_asset_source_model.yaml @@ -19,7 +19,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused: true + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.schedule.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.schedule.yaml new file mode 100644 index 000000000..ca7b8bc38 --- /dev/null +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.schedule.yaml @@ -0,0 +1,3 @@ +externalId: pump_model-populate-lift_station_pumps_edges +interval: '{{scheduleHourly}}' +isPaused: true diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml index b1f0a2cfd..92a6549a1 100644 --- a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-lift_station_pumps_edges.yaml @@ -21,6 +21,3 @@ authentication: scopes: {{cicd_scopes}} # Optional: If idP requires providing the audience audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused: true diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.schedule.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.schedule.yaml new file mode 100644 index 000000000..471fe35ec --- /dev/null +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.schedule.yaml @@ -0,0 +1,3 @@ +externalId: pump_model-populate-pump_container +interval: '{{scheduleHourly}}' +isPaused: true diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml index 0d3987ac7..9481afddd 100644 --- a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml +++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/transformations/pump_model-populate-pump_container.yaml @@ -19,7 +19,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - 
isPaused: true + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.schedule.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.schedule.yaml new file mode 100644 index 000000000..9de320f46 --- /dev/null +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_asset_{{default_location}}_{{source_asset}}_infield_sync_assets_from_hierarchy_to_apm +interval: '{{scheduleHourly}}' +isPaused: false diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml index 411e651da..4914f1d62 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm.yaml @@ -20,7 +20,4 @@ authentication: cdfProjectName: {{cdfProjectName}} scopes: {{cicd_scopes}} # Optional: If idP requires providing the audience - audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused: false + audience: {{cicd_audience}} \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.schedule.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.schedule.yaml new file mode 100644 index 000000000..68485c7f2 --- /dev/null +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_{{default_location}}_{{source_workorder}}_infield_sync_workorders_to_apm_activities +interval: '{{scheduleHourly}}' +isPaused: false diff --git a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml index 6ba6fee82..914ac575e 100644 --- a/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml +++ b/cognite_toolkit/cognite_modules/infield/cdf_infield_location/transformations/tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities.yaml @@ -22,6 +22,3 @@ authentication: scopes: {{cicd_scopes}} # Optional: If idP requires providing the audience audience: {{cicd_audience}} -schedule: - interval: '{{scheduleHourly}}' - isPaused: false diff --git a/tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml b/tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml new file mode 100644 index 000000000..cf16ac24c --- /dev/null +++ 
b/tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_asset_oid_workmate_apm_simple_load_assets +interval: 7 * * * * +isPaused: true diff --git a/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml b/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml new file mode 100644 index 000000000..546c15610 --- /dev/null +++ b/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_timeseries_oid_pi_apm_simple_load_timeseries2assets +interval: 7 * * * * +isPaused: true diff --git a/tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml b/tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml new file mode 100644 index 000000000..07566d49c --- /dev/null +++ b/tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_oid_workmate_apm_simple_load_workitems +interval: 7 * * * * +isPaused: true diff --git a/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml b/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml new file mode 100644 index 000000000..3f741a1d9 --- /dev/null +++ b/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2assets +interval: 7 * * * * +isPaused: true diff --git a/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml b/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml new file mode 100644 index 000000000..c82d49442 --- /dev/null +++ b/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders +interval: 7 * * * * +isPaused: true diff --git a/tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml b/tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml new file mode 100644 index 000000000..1430b2aee --- /dev/null +++ b/tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_oid_workmate_apm_simple_load_workorders +interval: 7 * * * * +isPaused: true diff --git a/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml b/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml new file mode 100644 index 000000000..db4b143cb --- /dev/null +++ b/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml @@ -0,0 +1,3 @@ +externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets +interval: 7 * * * * +isPaused: true From 3348294abd2bceeeded5662702c4de84dd79fea9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 14:00:04 +0100 Subject: [PATCH 76/90] New loader ready --- cognite_toolkit/cdf_tk/load.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 2bf844006..d8e5d356e 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -712,9 +712,6 @@ def get_id(self, item: Transformation) -> str: def load_resource(self, filepath: Path, dry_run: bool) -> Transformation: raw = load_yaml_inject_variables(filepath, self.ToolGlobals.environment_variables()) # The `authentication` key is custom for this template: - if raw.get("schedule"): - print("[ERROR] Schedule should not be part of transformations. yaml") - # with open()... 
source_oidc_credentials = raw.get("authentication", {}).get("read") or raw.get("authentication") or {} destination_oidc_credentials = raw.get("authentication", {}).get("write") or raw.get("authentication") or {} From ebd4327d11aa19a68c5fbb4ab721d86ca48eee74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 14:23:12 +0100 Subject: [PATCH 77/90] Tests regen'ed --- .../cdf_apm_simple_data_model.yaml | 41 ++++--------------- .../cdf_asset_source_model.yaml | 6 +-- .../cdf_data_pipeline_asset_valhall.yaml | 6 +-- .../cdf_infield_location.yaml | 11 ++--- .../cdf_oid_example_data.yaml | 6 +-- .../example_pump_asset_hierarchy.yaml | 6 +-- .../example_pump_data_model.yaml | 11 ++--- .../cdf_apm_simple_data_model.yaml | 9 ++++ .../cdf_asset_source_model.yaml | 2 + .../cdf_data_pipeline_asset_valhall.yaml | 2 + .../cdf_infield_location.yaml | 3 ++ .../cdf_oid_example_data.yaml | 2 + .../example_pump_asset_hierarchy.yaml | 2 + .../example_pump_data_model.yaml | 3 ++ 14 files changed, 46 insertions(+), 64 deletions(-) diff --git a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml index 80291771b..52976317e 100644 --- a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml @@ -355,10 +355,6 @@ Transformation: query: "select\n cast(`externalId` as STRING) as externalId,\n node_reference('apm_simple',\ \ `sourceExternalId`) as startNode,\n node_reference('apm_simple', `targetExternalId`)\ \ as endNode\nfrom\n `asset_oid_workmate`.`asset2children`;\n" - schedule: - externalId: tr_asset_oid_workmate_apm_simple_load_asset2children - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -395,10 +391,6 @@ Transformation: \ `parentExternalId`) as parent,\n cast(`description` as STRING) as description,\n\ \ cast(`tag` as STRING) as tag,\n cast(`areaId` as INT) as areaId,\n cast(`isActive`\ \ as BOOLEAN) as isActive\nfrom\n `asset_oid_workmate`.`assets`;\n" - schedule: - externalId: tr_asset_oid_workmate_apm_simple_load_assets - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -429,10 +421,6 @@ Transformation: ownerIsCurrentUser: true query: "select\n cast(`asset` as STRING) as externalId,\n array(timeseries) as\ \ metrics\nfrom\n `files_oid_{{source_files}}`.`timeseries2assets`;\n" - schedule: - externalId: tr_timeseries_oid_pi_apm_simple_load_timeseries2assets - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -467,10 +455,6 @@ Transformation: \ cast(`itemName` as STRING) as itemName,\n cast(`title` as STRING) as title,\n\ \ cast(`criticality` as STRING) as criticality,\n cast(`method` as STRING) as\ \ method,\n cast(`isCompleted` as BOOLEAN) as isCompleted\nfrom\n `workorder_oid_workmate`.`workitems`;\n" - schedule: - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -508,10 +492,6 @@ Transformation: \ as STRING) as externalId,\n node_reference('apm_simple', `sourceExternalId`)\ \ as startNode,\n node_reference('apm_simple', `targetExternalId`) as endNode\n\ from\n `workorder_oid_workmate`.`workitem2assets`;\n" - schedule: - externalId: 
tr_workorder_oid_workmate_apm_simple_load_workitems2assets - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -549,10 +529,6 @@ Transformation: \ as STRING) as externalId,\n node_reference('apm_simple', `sourceExternalId`)\ \ as startNode,\n node_reference('apm_simple', `targetExternalId`) as endNode\n\ from\n `workorder_oid_workmate`.`workorder2items`;\n" - schedule: - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -594,10 +570,6 @@ Transformation: \ as STRING) as priorityDescription, \ncast(`dueDate` as TIMESTAMP) as dueDate,\ \ \ncast(`createdDate` as TIMESTAMP) as createdDate, \ncast(`programNumber` as\ \ STRING) as programNumber \nfrom `workorder_oid_workmate`.`workorders`;\n" - schedule: - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -629,10 +601,6 @@ Transformation: query: "select\n cast(`externalId` as STRING) as externalId,\n node_reference('apm_simple',\ \ `sourceExternalId`) as startNode,\n node_reference('apm_simple', `targetExternalId`)\ \ as endNode\nfrom\n `workorder_oid_workmate`.`workorder2assets`;\n" - schedule: - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -1024,6 +992,15 @@ deleted: - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets + TransformationSchedule: + - externalId: tr_asset_oid_workmate_apm_simple_load_asset2children + - externalId: tr_asset_oid_workmate_apm_simple_load_assets + - externalId: tr_timeseries_oid_pi_apm_simple_load_timeseries2assets + - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems + - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2assets + - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders + - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders + - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets View: - externalId: Asset space: apm_simple diff --git a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml index 0fb3042ec..103105ac7 100644 --- a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml @@ -75,10 +75,6 @@ Transformation: \ cdf_assetSubtree('lift_pump_stations:root')\nwhere\n-- This is used to select\ \ the Pumps.\n isnotnull(`externalId`) and isnotnull(`parentExternalId`) and startswith(name,\ \ 'Pump');\n" - schedule: - externalId: sync-asset_hierarchy_cdf_asset_source_model - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -116,6 +112,8 @@ deleted: version: '1' Transformation: - externalId: sync-asset_hierarchy_cdf_asset_source_model + TransformationSchedule: + - externalId: sync-asset_hierarchy_cdf_asset_source_model View: - externalId: Asset space: ExtendedSourceDataModels diff --git 
a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml index 11de4f075..1fcc02f34 100644 --- a/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml +++ b/tests/test_approval_modules_snapshots/cdf_data_pipeline_asset_valhall.yaml @@ -143,10 +143,6 @@ Transformation: \ as dataSetId,\n to_metadata_except(\n array(\"sourceDb\", \"parentTag\"\ , \"description\"), *) \n as metadata\nFROM \n\ \ `asset_oid_workmate`.`assets`\n" - schedule: - externalId: tr_asset_oid_workmate_asset_hierarchy - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -163,3 +159,5 @@ deleted: - externalId: ep_src_asset_oid_workmate Transformation: - externalId: tr_asset_oid_workmate_asset_hierarchy + TransformationSchedule: + - externalId: tr_asset_oid_workmate_asset_hierarchy diff --git a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml index c969f3740..377ddb73b 100644 --- a/tests/test_approval_modules_snapshots/cdf_infield_location.yaml +++ b/tests/test_approval_modules_snapshots/cdf_infield_location.yaml @@ -453,10 +453,6 @@ Transformation: \ cast(asset.name as STRING) as title,\n cast(asset.externalId as STRING) as\ \ sourceId\nfrom\n cdf_assetSubtree('WMT:VAL') as asset\n -- Get root asset\n\ \ inner join cdf_assetSubtree('WMT:VAL') as rootAsset on asset.rootId = rootAsset.id\n" - schedule: - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm - interval: 7 * * * * - isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -494,10 +490,6 @@ Transformation: \ as startTime,\n cast(date_add(current_date(), 7) as TIMESTAMP) as endTime,\n\ \ cast(`title` as STRING) as title,\n 'WMT:VAL' as rootLocation,\n 'workmate'\ \ as source\n from\n `workorder_oid_workmate`.`workorders`;\n" - schedule: - externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities - interval: 7 * * * * - isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -516,3 +508,6 @@ deleted: Transformation: - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm - externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities + TransformationSchedule: + - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm + - externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities diff --git a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml index a394d3b10..f54966a10 100644 --- a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml +++ b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml @@ -674,10 +674,6 @@ Transformation: \ as STRING) as name,\n cast(`description` as STRING) as description,\n cast(`sourceDb`\ \ as STRING) as source,\n cast(`parentExternalId` as STRING) as parentExternalId\n\ from\n `asset_oid_workmate`.`assets`;\n" - schedule: - externalId: tr_asset_oid_workmate_asset_hierarchy - interval: 7 * * * * - isPaused: false sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -740,3 +736,5 @@ deleted: - externalId: pi_191092 Transformation: - externalId: tr_asset_oid_workmate_asset_hierarchy + TransformationSchedule: + - externalId: 
tr_asset_oid_workmate_asset_hierarchy diff --git a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml index 133bfb013..3da9bd844 100644 --- a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml @@ -75,10 +75,6 @@ Transformation: \ LowHeadFT,\n LowHeadFlowGPM,\n PumpControl,\n PumpModel,\n Shape__Length,\n\ \ Enabled,\n DesignPointHeadFT,\n LowHeadFT,\n FacilityID,\n InstallDate,\n\ \ LifeCycleStatus,\n LocationDescription\n ) as metadata\nfrom `pump_assets`.`collections_pump`\n" - schedule: - externalId: pump_asset_hierarchy-load-collections_pump - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -97,3 +93,5 @@ deleted: - collections_pump Transformation: - externalId: pump_asset_hierarchy-load-collections_pump + TransformationSchedule: + - externalId: pump_asset_hierarchy-load-collections_pump diff --git a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml index e3ff12c36..08a246dc4 100644 --- a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml @@ -90,10 +90,6 @@ Transformation: \ as STRING)) as externalId,\n `parent` as startNode,\n node_reference('pumpInstanceSpace',\ \ cast(`externalId` as STRING)) as endNode\nfrom\n cdf_data_models(\"ExtendedSourceDataModels\"\ , \"ExtendedSourceData\", \"1\", \"Asset\")\nwhere\n startswith(title, 'Pump')\n" - schedule: - externalId: pump_model-populate-lift_station_pumps_edges - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -129,10 +125,6 @@ Transformation: \ '$.LowHeadFlowGPM') as DOUBLE) as LowHeadFlowGPM\nfrom\n cdf_data_models(\"\ ExtendedSourceDataModels\", \"ExtendedSourceData\", \"1\", \"Asset\")\nwhere\n\ \ startswith(title, 'Pump')\n" - schedule: - externalId: pump_model-populate-pump_container - interval: 7 * * * * - isPaused: true sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -249,6 +241,9 @@ deleted: Transformation: - externalId: pump_model-populate-lift_station_pumps_edges - externalId: pump_model-populate-pump_container + TransformationSchedule: + - externalId: pump_model-populate-lift_station_pumps_edges + - externalId: pump_model-populate-pump_container View: - externalId: LiftStation space: pumpModelSpace diff --git a/tests/test_approval_modules_snapshots_clean/cdf_apm_simple_data_model.yaml b/tests/test_approval_modules_snapshots_clean/cdf_apm_simple_data_model.yaml index 5b2182d34..d483afa25 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_apm_simple_data_model.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_apm_simple_data_model.yaml @@ -25,6 +25,15 @@ deleted: - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets + TransformationSchedule: + - externalId: tr_asset_oid_workmate_apm_simple_load_asset2children + - externalId: tr_asset_oid_workmate_apm_simple_load_assets + - externalId: tr_timeseries_oid_pi_apm_simple_load_timeseries2assets + - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems + - 
externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2assets + - externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders + - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders + - externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets View: - externalId: Asset space: apm_simple diff --git a/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml index 233483fe0..d192f7e9b 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml @@ -13,6 +13,8 @@ deleted: - cdfTemplateInstances Transformation: - externalId: sync-asset_hierarchy_cdf_asset_source_model + TransformationSchedule: + - externalId: sync-asset_hierarchy_cdf_asset_source_model View: - externalId: Asset space: ExtendedSourceDataModels diff --git a/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml b/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml index 7a44a713c..f8e6d3787 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_data_pipeline_asset_valhall.yaml @@ -3,3 +3,5 @@ deleted: - externalId: ep_src_asset_oid_workmate Transformation: - externalId: tr_asset_oid_workmate_asset_hierarchy + TransformationSchedule: + - externalId: tr_asset_oid_workmate_asset_hierarchy diff --git a/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml b/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml index fd4c139c1..b686b3b43 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_infield_location.yaml @@ -9,3 +9,6 @@ deleted: Transformation: - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm - externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities + TransformationSchedule: + - externalId: tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm + - externalId: tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities diff --git a/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml b/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml index 38959a33f..e57ea2f90 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml @@ -49,3 +49,5 @@ deleted: - externalId: pi_191092 Transformation: - externalId: tr_asset_oid_workmate_asset_hierarchy + TransformationSchedule: + - externalId: tr_asset_oid_workmate_asset_hierarchy diff --git a/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml b/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml index 469bb8ab1..e678ffe4b 100644 --- a/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml +++ b/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml @@ -5,3 +5,5 @@ deleted: - collections_pump Transformation: - externalId: pump_asset_hierarchy-load-collections_pump + TransformationSchedule: + - externalId: pump_asset_hierarchy-load-collections_pump diff --git a/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml 
b/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml index cec1332f1..9e07a0b21 100644 --- a/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml +++ b/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml @@ -14,6 +14,9 @@ deleted: Transformation: - externalId: pump_model-populate-lift_station_pumps_edges - externalId: pump_model-populate-pump_container + TransformationSchedule: + - externalId: pump_model-populate-lift_station_pumps_edges + - externalId: pump_model-populate-pump_container View: - externalId: LiftStation space: pumpModelSpace From f862a9be06a2a18a0a3eae5d2fead7202894c5e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 14:25:18 +0100 Subject: [PATCH 78/90] Changelogs --- CHANGELOG.cdf-tk.md | 1 + CHANGELOG.templates.md | 1 + 2 files changed, 2 insertions(+) diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md index 7e3aa013f..384a1ebb4 100644 --- a/CHANGELOG.cdf-tk.md +++ b/CHANGELOG.cdf-tk.md @@ -25,6 +25,7 @@ Changes are grouped as follows: - Check for whether data set id is present in the config files. - Print table at the end of `cdf-tk deploy` with the resources that were created, deleted, and skipped. - Support for Extraction Pipelines and Extraction Pipeline configuration for remotely configured Extractors +- Separate loader for Transformation Schedule resources. ### Removed diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index 9fc21b00d..a3796776c 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -37,6 +37,7 @@ Changes are grouped as follows: - Combined the child and parent transformations `sync_assets_from_hierarchy_to_apm` in `cdf_infield_location`. This has the benefit of not having to wait for the parent transformation to finish before starting the child transformation, thus no longer a dependency between the two transformations. +- Transformation Schedules broken out into separate files, following naming convention `.schedule.yaml`. 
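The `.schedule.yaml` convention recorded in the changelog entries above pairs every transformation YAML with an optional sidecar file that carries only the schedule, keyed by the same `externalId`. The following is a minimal sketch of a consistency check for that convention. It assumes PyYAML and a directory of built files where template variables such as `{{scheduleHourly}}` have already been substituted; the helper name `check_schedule_sidecars` is illustrative, not part of the toolkit.

from pathlib import Path

import yaml  # PyYAML, assumed available


def check_schedule_sidecars(transformations_dir: Path) -> list[str]:
    """Report schedule sidecars that do not line up with their transformation."""
    problems: list[str] = []
    for schedule_file in sorted(transformations_dir.glob("*.schedule.yaml")):
        # By convention, tr_foo.schedule.yaml is the sidecar of tr_foo.yaml.
        transformation_file = schedule_file.with_name(
            schedule_file.name.replace(".schedule.yaml", ".yaml")
        )
        if not transformation_file.exists():
            problems.append(f"{schedule_file.name}: no matching transformation file")
            continue
        schedule = yaml.safe_load(schedule_file.read_text())
        transformation = yaml.safe_load(transformation_file.read_text())
        if schedule.get("externalId") != transformation.get("externalId"):
            problems.append(f"{schedule_file.name}: externalId differs from {transformation_file.name}")
    return problems

A check like this catches a sidecar whose `externalId` has drifted from its transformation, which is exactly the pairing the new Transformation Schedule loader relies on.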
### Fixed From a53b4f8e0c895b2f72abb6994058fae69045def9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 14:32:53 +0100 Subject: [PATCH 79/90] tests regen'ed again --- .../cdf_apm_simple_data_model.yaml | 2 +- .../cdf_asset_source_model.yaml | 18 +++++++++--------- .../example_pump_asset_hierarchy.yaml | 4 ++-- .../example_pump_data_model.yaml | 12 ++++++------ .../cdf_asset_source_model.yaml | 2 +- .../example_pump_asset_hierarchy.yaml | 2 +- .../example_pump_data_model.yaml | 4 ++-- 7 files changed, 22 insertions(+), 22 deletions(-) diff --git a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml index a9a36d927..628830cda 100644 --- a/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_apm_simple_data_model.yaml @@ -427,7 +427,7 @@ Transformation: name: timeseries:oid:pi:apm_simple:load_timeseries2assets ownerIsCurrentUser: true query: "select\n cast(`asset` as STRING) as externalId,\n array(timeseries) as\ - \ metrics\nfrom\n `files_oid_{{source_files}}`.`timeseries2assets`;\n" + \ metrics\nfrom\n `files_oid_pi`.`timeseries2assets`;\n" sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} diff --git a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml index e5da6feda..cf7fe66f2 100644 --- a/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml +++ b/tests/test_approval_modules_snapshots/cdf_asset_source_model.yaml @@ -73,13 +73,13 @@ Transformation: where\n-- This is used to select the Lift Stations.\n isnotnull(`externalId`)\ \ and isnotnull(`parentExternalId`) and not startswith(name, 'Pump')\n\nUNION\ \ ALL\n-- Pumps\nselect\n concat('pump:', cast(`externalId` as STRING)) as externalId,\n\ - \ node_reference('cdfTemplateInstances', `parentExternalId`) as parent,\n node_reference('cdfTemplateInstances',\ - \ 'lift_pump_stations:root') as root,\n cast(`name` as STRING) as title,\n cast(`source`\ - \ as STRING) as source,\n cast(`description` as STRING) as description,\n cast(`labels`\ - \ as ARRAY < STRING >) as labels,\n to_json(`metadata`) as metadata\nfrom\n \ - \ cdf_assetSubtree('lift_pump_stations:root')\nwhere\n-- This is used to select\ - \ the Pumps.\n isnotnull(`externalId`) and isnotnull(`parentExternalId`) and startswith(name,\ - \ 'Pump');\n" + \ node_reference('sp_cdf_template_instances', `parentExternalId`) as parent,\n\ + \ node_reference('sp_cdf_template_instances', 'lift_pump_stations:root') as root,\n\ + \ cast(`name` as STRING) as title,\n cast(`source` as STRING) as source,\n \ + \ cast(`description` as STRING) as description,\n cast(`labels` as ARRAY < STRING\ + \ >) as labels,\n to_json(`metadata`) as metadata\nfrom\n cdf_assetSubtree('lift_pump_stations:root')\n\ + where\n-- This is used to select the Pumps.\n isnotnull(`externalId`) and isnotnull(`parentExternalId`)\ + \ and startswith(name, 'Pump');\n" sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -88,7 +88,7 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: -- externalId: tr_sync-asset_hierarchy_cdf_asset_source_model +- externalId: sync-asset_hierarchy_cdf_asset_source_model interval: 7 * * * * isPaused: true View: @@ -116,7 +116,7 @@ deleted: type: datamodel version: '1' Transformation: - - externalId: 
sync-asset_hierarchy_cdf_asset_source_model + - externalId: tr_sync-asset_hierarchy_cdf_asset_source_model TransformationSchedule: - externalId: sync-asset_hierarchy_cdf_asset_source_model View: diff --git a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml index 2e779af0c..9fbd755cc 100644 --- a/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_asset_hierarchy.yaml @@ -84,7 +84,7 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: -- externalId: tr_pump_asset_hierarchy-load-collections_pump +- externalId: pump_asset_hierarchy-load-collections_pump interval: 7 * * * * isPaused: true deleted: @@ -93,6 +93,6 @@ deleted: name: - collections_pump Transformation: - - externalId: pump_asset_hierarchy-load-collections_pump + - externalId: tr_pump_asset_hierarchy-load-collections_pump TransformationSchedule: - externalId: pump_asset_hierarchy-load-collections_pump diff --git a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml index d1bf810e6..15488d3d6 100644 --- a/tests/test_approval_modules_snapshots/example_pump_data_model.yaml +++ b/tests/test_approval_modules_snapshots/example_pump_data_model.yaml @@ -129,8 +129,8 @@ Transformation: \ '$.LowHeadFT') as DOUBLE) as LowHeadFT,\n cast(get_json_object(`metadata`,\ \ '$.DesignPointFlowGPM') as DOUBLE) as DesignPointFlowGPM,\n cast(get_json_object(`metadata`,\ \ '$.LowHeadFlowGPM') as DOUBLE) as LowHeadFlowGPM\nfrom\n cdf_data_models(\"\ - ExtendedSourceDataModels\", \"ExtendedSourceData\", \"1\", \"Asset\")\nwhere\n\ - \ startswith(title, 'Pump')\n" + sp_extended_source_data_models\", \"ExtendedSourceData\", \"1\", \"Asset\")\n\ + where\n startswith(title, 'Pump')\n" sourceOidcCredentials: audience: ${IDP_AUDIENCE} cdfProjectName: ${CDF_PROJECT} @@ -139,10 +139,10 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: -- externalId: tr_pump_model-populate-lift_station_pumps_edges +- externalId: pump_model-populate-lift_station_pumps_edges interval: 7 * * * * isPaused: true -- externalId: tr_pump_model-populate-pump_container +- externalId: pump_model-populate-pump_container interval: 7 * * * * isPaused: true View: @@ -245,8 +245,8 @@ deleted: type: datamodel version: '1' Transformation: - - externalId: pump_model-populate-lift_station_pumps_edges - - externalId: pump_model-populate-pump_container + - externalId: tr_pump_model-populate-lift_station_pumps_edges + - externalId: tr_pump_model-populate-pump_container TransformationSchedule: - externalId: pump_model-populate-lift_station_pumps_edges - externalId: pump_model-populate-pump_container diff --git a/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml b/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml index 639ce2263..fb0b88950 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_asset_source_model.yaml @@ -12,7 +12,7 @@ deleted: - sp_cdf_template_instances - sp_extended_source_data_models Transformation: - - externalId: sync-asset_hierarchy_cdf_asset_source_model + - externalId: tr_sync-asset_hierarchy_cdf_asset_source_model TransformationSchedule: - externalId: sync-asset_hierarchy_cdf_asset_source_model View: diff --git 
a/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml b/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml index e678ffe4b..6943a48bc 100644 --- a/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml +++ b/tests/test_approval_modules_snapshots_clean/example_pump_asset_hierarchy.yaml @@ -4,6 +4,6 @@ deleted: name: - collections_pump Transformation: - - externalId: pump_asset_hierarchy-load-collections_pump + - externalId: tr_pump_asset_hierarchy-load-collections_pump TransformationSchedule: - externalId: pump_asset_hierarchy-load-collections_pump diff --git a/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml b/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml index a439032e7..597996087 100644 --- a/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml +++ b/tests/test_approval_modules_snapshots_clean/example_pump_data_model.yaml @@ -12,8 +12,8 @@ deleted: - sp_pump_instance_space - sp_pump_model_space Transformation: - - externalId: pump_model-populate-lift_station_pumps_edges - - externalId: pump_model-populate-pump_container + - externalId: tr_pump_model-populate-lift_station_pumps_edges + - externalId: tr_pump_model-populate-pump_container TransformationSchedule: - externalId: pump_model-populate-lift_station_pumps_edges - externalId: pump_model-populate-pump_container From 1f385752f9594e630b2e5c0f38ea6981362a6bc0 Mon Sep 17 00:00:00 2001 From: Greger Wedel Date: Fri, 15 Dec 2023 15:40:25 +0100 Subject: [PATCH 80/90] Resurrect --drop-data --- cognite_toolkit/cdf.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/cognite_toolkit/cdf.py b/cognite_toolkit/cdf.py index a014c510e..719d984cd 100755 --- a/cognite_toolkit/cdf.py +++ b/cognite_toolkit/cdf.py @@ -238,6 +238,13 @@ def deploy( help="Whether to drop existing configurations, drop per resource if present.", ), ] = False, + drop_data: Annotated[ + Optional[bool], + typer.Option( + "--drop-data", + help="Whether to drop existing data in data model containers and spaces.", + ), + ] = False, dry_run: Annotated[ Optional[bool], typer.Option( @@ -290,7 +297,7 @@ def deploy( drop=drop, action="deploy", dry_run=dry_run, - drop_data=False, + drop_data=drop_data, verbose=ctx.obj.verbose, ) results = DeployResults([], "deploy", dry_run=dry_run) From 31ce30fde0a2dcc9508f3304e9aa31595b683772 Mon Sep 17 00:00:00 2001 From: Greger Wedel Date: Fri, 15 Dec 2023 15:40:52 +0100 Subject: [PATCH 81/90] Fix error on data set for clean --- cognite_toolkit/cdf_tk/load.py | 17 +++++++++++------ cognite_toolkit/cdf_tk/templates.py | 2 +- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index 0a87a0e16..dc581d893 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -792,9 +792,6 @@ def delete(self, ids: Sequence[str], drop_data: bool) -> int: self.client.transformations.schedules.delete(external_id=ids, ignore_unknown_ids=False) return len(ids) except CogniteNotFoundError as e: - print( - f" [bold yellow]WARNING:[/] {len(e.not_found)} out of {len(ids)} transformation schedules do(es) not exist." - ) return len(ids) - len(e.not_found) def create(self, items: Sequence[Transformation], drop: bool, filepath: Path) -> TransformationList: @@ -1349,8 +1346,11 @@ def deploy_or_clean_resources( # as these resources share the same folder. 
         pattern = re.compile(loader.filename_pattern)
         filepaths = [file for file in filepaths if pattern.match(file.stem)]
-
-    items = [loader.load_resource(f, dry_run) for f in filepaths]
+    if action == "clean":
+        # If we do a clean, we do not want to verify that everything exists wrt data sets, spaces etc.
+        items = [loader.load_resource(f, dry_run=True) for f in filepaths]
+    else:
+        items = [loader.load_resource(f, dry_run) for f in filepaths]
     items = [item for item in items if item is not None]
     nr_of_batches = len(items)
     nr_of_items = sum(len(item) if isinstance(item, Sized) else 1 for item in items)
@@ -1364,7 +1364,12 @@ def deploy_or_clean_resources(
     print(f"[bold]{action_word} {nr_of_items} {loader.display_name} in {nr_of_batches} batches to CDF...[/]")
     batches = [item if isinstance(item, Sized) else [item] for item in items]
     if drop and loader.support_drop and action == "deploy":
-        print(f" --drop is specified, will delete existing {loader.display_name} before uploading.")
+        if drop_data and (loader.api_name == "data_modeling.spaces" or loader.api_name == "data_modeling.containers"):
+            print(
+                f" --drop-data is specified, will delete existing nodes and edges before deleting {loader.display_name}."
+            )
+        else:
+            print(f" --drop is specified, will delete existing {loader.display_name} before uploading.")
 
     # Deleting resources.
     nr_of_deleted = 0
diff --git a/cognite_toolkit/cdf_tk/templates.py b/cognite_toolkit/cdf_tk/templates.py
index 36aca4f8e..abb5a8616 100644
--- a/cognite_toolkit/cdf_tk/templates.py
+++ b/cognite_toolkit/cdf_tk/templates.py
@@ -421,7 +421,7 @@ def build_config(
             print(f" {warning}")
     process_config_files(source_dir, selected_modules, build_dir, config, build.name, verbose)
     build.dump_to_file(build_dir)
-    print(f" [bold green]INFO:[/] Build complete. Files are located in {build_dir!s}.")
+    print(f" [bold green]INFO:[/] Build complete. Files are located in {build_dir!s}/")

From 4fc94ec779d28c35ebbbcf3d494d0dc141a65014 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?=
Date: Fri, 15 Dec 2023 15:43:37 +0100
Subject: [PATCH 82/90] Marked change BREAKING

---
 CHANGELOG.templates.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md
index fcc473ec9..1fc9dd947 100644
--- a/CHANGELOG.templates.md
+++ b/CHANGELOG.templates.md
@@ -31,6 +31,7 @@ Changes are grouped as follows:
 - **BREAKING** All externalIds and names have been changed to follow the naming conventions for resources
   in `examples/cdf_oid_example_data`, `examples/cdf_apm_simple_data_model`, `modules/cdf_apm_base`,
   `modules/cdf_infield_common`, and `modules/cdf_infield_location`.
+- **BREAKING** Transformation Schedules broken out into separate files, following naming convention `.schedule.yaml`.
 - All cognite templates have been moved into `cognite_templates` folder, while `local_templates` is renamed to `custom_templates`.
 - Move cdf_apm_base into separate folder.
 - The file `local.yaml` has been renamed `environments.yaml` to better reflect its purpose.
@@ -39,7 +40,6 @@ Changes are grouped as follows:
 - Combined the child and parent transformations `sync_assets_from_hierarchy_to_apm` in `cdf_infield_location`.
   This has the benefit of not having to wait for the parent transformation to finish before starting the
   child transformation, thus no longer a dependency between the two transformations.
-- Transformation Schedules broken out into separate files, following naming convention `.schedule.yaml`.
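Patches 80 and 81 above follow one rule: remote lookups that can raise, such as resolving a data set externalId to an internal id, are only performed when the command will actually write to CDF, so both `--dry-run` and `clean` take the validation-free path. Below is a stripped-down, illustrative version of that rule; `resolve_data_set_id` is a hypothetical helper, and `tool_config` stands in for the toolkit's configuration object.

def resolve_data_set_id(tool_config, data_set_external_id: str, dry_run: bool) -> int:
    """Resolve a data set externalId, skipping the remote lookup on dry runs."""
    if dry_run:
        # Nothing is written on --dry-run, and clean loads with dry_run=True,
        # so a syntactically valid placeholder id such as -1 is enough here.
        return -1
    # Raises if the data set does not exist in the target CDF project.
    return tool_config.verify_dataset(data_set_external_id)

Because the loading step runs with `dry_run=True` during `clean`, a clean can be executed against a project where the referenced data sets were never created, which is the failure the patch title describes.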
### Fixed From eadfd7060dd4f8da16843141c862438769508fd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 15:54:05 +0100 Subject: [PATCH 83/90] Deleted files that shouldn't have been there in the first place (#228) --- tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml | 3 --- ...ries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml | 3 --- ...korder_oid_workmate_apm_simple_load_workitems.schedule.yaml | 3 --- ...oid_workmate_apm_simple_load_workitems2assets.schedule.yaml | 3 --- ...workmate_apm_simple_load_workitems2workorders.schedule.yaml | 3 --- ...order_oid_workmate_apm_simple_load_workorders.schedule.yaml | 3 --- ...id_workmate_apm_simple_load_workorders2assets.schedule.yaml | 3 --- 7 files changed, 21 deletions(-) delete mode 100644 tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml delete mode 100644 tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml delete mode 100644 tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml delete mode 100644 tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml delete mode 100644 tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml delete mode 100644 tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml delete mode 100644 tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml diff --git a/tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml b/tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml deleted file mode 100644 index cf16ac24c..000000000 --- a/tr_asset_oid_workmate_apm_simple_load_assets.schedule.yaml +++ /dev/null @@ -1,3 +0,0 @@ -externalId: tr_asset_oid_workmate_apm_simple_load_assets -interval: 7 * * * * -isPaused: true diff --git a/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml b/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml deleted file mode 100644 index 546c15610..000000000 --- a/tr_timeseries_oid_pi_apm_simple_load_timeseries2assets.schedule.yaml +++ /dev/null @@ -1,3 +0,0 @@ -externalId: tr_timeseries_oid_pi_apm_simple_load_timeseries2assets -interval: 7 * * * * -isPaused: true diff --git a/tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml b/tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml deleted file mode 100644 index 07566d49c..000000000 --- a/tr_workorder_oid_workmate_apm_simple_load_workitems.schedule.yaml +++ /dev/null @@ -1,3 +0,0 @@ -externalId: tr_workorder_oid_workmate_apm_simple_load_workitems -interval: 7 * * * * -isPaused: true diff --git a/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml b/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml deleted file mode 100644 index 3f741a1d9..000000000 --- a/tr_workorder_oid_workmate_apm_simple_load_workitems2assets.schedule.yaml +++ /dev/null @@ -1,3 +0,0 @@ -externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2assets -interval: 7 * * * * -isPaused: true diff --git a/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml b/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml deleted file mode 100644 index c82d49442..000000000 --- a/tr_workorder_oid_workmate_apm_simple_load_workitems2workorders.schedule.yaml +++ /dev/null @@ -1,3 +0,0 @@ -externalId: tr_workorder_oid_workmate_apm_simple_load_workitems2workorders -interval: 7 * * * * -isPaused: true diff --git a/tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml 
b/tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml deleted file mode 100644 index 1430b2aee..000000000 --- a/tr_workorder_oid_workmate_apm_simple_load_workorders.schedule.yaml +++ /dev/null @@ -1,3 +0,0 @@ -externalId: tr_workorder_oid_workmate_apm_simple_load_workorders -interval: 7 * * * * -isPaused: true diff --git a/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml b/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml deleted file mode 100644 index db4b143cb..000000000 --- a/tr_workorder_oid_workmate_apm_simple_load_workorders2assets.schedule.yaml +++ /dev/null @@ -1,3 +0,0 @@ -externalId: tr_workorder_oid_workmate_apm_simple_load_workorders2assets -interval: 7 * * * * -isPaused: true From 8365eec40a4f16c9cca58a1ece4f4ac87d5320f6 Mon Sep 17 00:00:00 2001 From: Greger Teigre Wedel Date: Fri, 15 Dec 2023 16:09:19 +0100 Subject: [PATCH 84/90] Gtw/release-candidate-beta (#229) * Fix release flow * Fix release version tests * Bump version * Upgrade demo version of templates --- .github/workflows/release.yaml | 2 +- CHANGELOG.cdf-tk.md | 2 +- CHANGELOG.templates.md | 3 +-- cognite_toolkit/_version.py | 2 +- cognite_toolkit/environments.yaml | 2 +- demo/environments.yaml | 2 +- pyproject.toml | 2 +- tests/test_build.py | 8 +++++--- 8 files changed, 12 insertions(+), 11 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 44ef367fb..ac61510bc 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -2,7 +2,7 @@ name: Release pypi package on: push: branches: - - release* + - release env: PYTHON_VERSION: '3.11' jobs: diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md index ea74de7de..4dd30c1c4 100644 --- a/CHANGELOG.cdf-tk.md +++ b/CHANGELOG.cdf-tk.md @@ -15,7 +15,7 @@ Changes are grouped as follows: - `Fixed` for any bug fixes. - `Security` in case of vulnerabilities. -## [TBD] - 2023-12-TBD +## [0.1.0b1] - 2023-12-15 ### Added diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md index 1fc9dd947..b18d13bb3 100644 --- a/CHANGELOG.templates.md +++ b/CHANGELOG.templates.md @@ -15,7 +15,7 @@ Changes are grouped as follows: - `Fixed` for any bug fixes. - `Security` in case of vulnerabilities. -## TBD - 2023-12-TBD +## [0.1.0b1] - 2023-12-15 ### Added @@ -46,7 +46,6 @@ Changes are grouped as follows: - Removed transformation identity provider variables from modules and reused the global cicd_ prefixed ones. - Ensure all transformations in `cognite_modules` are prefixed with `tr_` and all spaces are prefixed with `sp_`. - ## [0.1.0a3] - 2023-11-29 ### Changed diff --git a/cognite_toolkit/_version.py b/cognite_toolkit/_version.py index 1822f2d18..eb8f3775b 100644 --- a/cognite_toolkit/_version.py +++ b/cognite_toolkit/_version.py @@ -1 +1 @@ -__version__ = "0.1.0a3" +__version__ = "0.1.0b1" diff --git a/cognite_toolkit/environments.yaml b/cognite_toolkit/environments.yaml index 1057f19f7..9e2941970 100644 --- a/cognite_toolkit/environments.yaml +++ b/cognite_toolkit/environments.yaml @@ -53,4 +53,4 @@ prod: # DO NOT EDIT THE LINES BELOW! # This part is used by cdf-toolkit to keep track of the version and help you upgrade. __system: - cdf_toolkit_version: 0.1.0a3 + cdf_toolkit_version: 0.1.0b1 diff --git a/demo/environments.yaml b/demo/environments.yaml index c1fd66f98..43b1a5e92 100644 --- a/demo/environments.yaml +++ b/demo/environments.yaml @@ -26,4 +26,4 @@ demo: # DO NOT EDIT THE LINES BELOW! 
# This part is used by cdf-toolkit to keep track of the version and help you upgrade. __system: - cdf_toolkit_version: 0.1.0a3 + cdf_toolkit_version: 0.1.0b1 diff --git a/pyproject.toml b/pyproject.toml index 8b6a82319..a65d47e05 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "cognite_toolkit" -version = "0.1.0a3" +version = "0.1.0b1" description = "Official Cognite Data Fusion tool for project templates and configuration deployment" authors = ["Cognite AS "] license = "Apache-2" diff --git a/tests/test_build.py b/tests/test_build.py index e1474dca3..f3332cfbf 100644 --- a/tests/test_build.py +++ b/tests/test_build.py @@ -35,7 +35,7 @@ def test_changelog_entry_version_matches(package_version: str, changelog_name: s match = next(_parse_changelog(changelog_name)) changelog_version = match.group(1) assert changelog_version == package_version, ( - f"The latest entry in 'CHANGELOG.md' has a different version ({changelog_version}) than " + f"The latest entry in '{changelog_name}' has a different version ({changelog_version}) than " f"cognite_toolkit/_version.py: ({__version__}). Did you forget to add a new entry? " "Or maybe you haven't followed the required format?" ) @@ -68,7 +68,9 @@ def test_changelog_entry_date(changelog_name: str) -> None: try: datetime.strptime(date := match.group(3), "%Y-%m-%d") except Exception: - assert False, f"Date given in the newest entry in 'CHANGELOG.md', {date!r}, is not valid/parsable (YYYY-MM-DD)" + assert ( + False + ), f"Date given in the newest entry in '{changelog_name}', {date!r}, is not valid/parsable (YYYY-MM-DD)" else: assert True @@ -95,4 +97,4 @@ def test_environment_system_variables_updated() -> None: def _parse_changelog(changelog: str) -> Iterator[Match[str]]: changelog = (REPO_ROOT / changelog).read_text(encoding="utf-8") - return re.finditer(r"##\s\[(\d+\.\d+\.\d+(a\d+)?)\]\s-\s(\d+-\d+-\d+)", changelog) + return re.finditer(r"##\s\[(\d+\.\d+\.\d+([ab]\d+)?)\]\s-\s(\d+-\d+-\d+)", changelog) From b247375abe18645d157b44708c98948c62565551 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 16:26:50 +0100 Subject: [PATCH 85/90] fix --- cognite_toolkit/cdf_tk/load.py | 2 +- ..._asset_oid_workmate_asset_hierarchy_example.schedule.yaml} | 2 +- ....sql => tr_asset_oid_workmate_asset_hierarchy_example.sql} | 0 ...aml => tr_asset_oid_workmate_asset_hierarchy_example.yaml} | 4 ++-- 4 files changed, 4 insertions(+), 4 deletions(-) rename cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/{tr_asset_oid_workmate_asset_hierarchy.schedule.yaml => tr_asset_oid_workmate_asset_hierarchy_example.schedule.yaml} (85%) rename cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/{tr_asset_oid_workmate_asset_hierarchy.sql => tr_asset_oid_workmate_asset_hierarchy_example.sql} (100%) rename cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/{tr_asset_oid_workmate_asset_hierarchy.yaml => tr_asset_oid_workmate_asset_hierarchy_example.yaml} (96%) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index dc581d893..ed1434275 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -794,7 +794,7 @@ def delete(self, ids: Sequence[str], drop_data: bool) -> int: except CogniteNotFoundError as e: return len(ids) - len(e.not_found) - def create(self, items: Sequence[Transformation], drop: bool, filepath: Path) -> TransformationList: + def create(self, items:
Sequence[TransformationSchedule], drop: bool, filepath: Path) -> TransformationScheduleList: return self.client.transformations.schedules.create(items) diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.schedule.yaml similarity index 85% rename from cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.schedule.yaml index 4397716cb..340de856d 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.schedule.yaml @@ -1,3 +1,3 @@ -externalId: tr_asset_{{default_location}}_{{source_asset}}_asset_hierarchy +externalId: tr_asset_{{default_location}}_{{source_asset}}_asset_hierarchy_example interval: '{{scheduleHourly}}' isPaused: false \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.sql b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.sql similarity index 100% rename from cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.sql rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.sql diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.yaml similarity index 96% rename from cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.yaml index 7e5e298e7..d7fa34c4f 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.yaml @@ -1,5 +1,5 @@ -externalId: tr_asset_{{default_location}}_{{source_asset}}_asset_hierarchy -name: asset:{{default_location}}:{{source_asset}}:asset_hierarchy +externalId: tr_asset_{{default_location}}_{{source_asset}}_asset_hierarchy_example +name: asset:{{default_location}}:{{source_asset}}:asset_hierarchy:example destination: type: "asset_hierarchy" ignoreNullFields: true From fa3921546c8fe067403947d3b730aa7599516012 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 16:29:54 +0100 Subject: [PATCH 86/90] test regen --- .../cdf_oid_example_data.yaml | 10 +++++----- .../cdf_oid_example_data.yaml | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml 
b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml index 7f11ef749..6f407f561 100644 --- a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml +++ b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml @@ -669,10 +669,10 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} - externalId: tr_asset_oid_workmate_asset_hierarchy + externalId: tr_asset_oid_workmate_asset_hierarchy_example ignoreNullFields: true isPublic: true - name: asset:oid:workmate:asset_hierarchy + name: asset:oid:workmate:asset_hierarchy:example ownerIsCurrentUser: true query: "select\n cast(`externalId` as STRING) as externalId,\n cast(`externalId`\ \ as STRING) as name,\n cast(`description` as STRING) as description,\n cast(`sourceDb`\ @@ -686,7 +686,7 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: -- externalId: tr_asset_oid_workmate_asset_hierarchy +- externalId: tr_asset_oid_workmate_asset_hierarchy_example interval: 7 * * * * isPaused: false deleted: @@ -739,6 +739,6 @@ deleted: - externalId: pi_163657 - externalId: pi_191092 Transformation: - - externalId: tr_asset_oid_workmate_asset_hierarchy + - externalId: tr_asset_oid_workmate_asset_hierarchy_example TransformationSchedule: - - externalId: tr_asset_oid_workmate_asset_hierarchy + - externalId: tr_asset_oid_workmate_asset_hierarchy_example diff --git a/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml b/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml index e57ea2f90..b4b834eee 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml @@ -48,6 +48,6 @@ deleted: - externalId: pi_163657 - externalId: pi_191092 Transformation: - - externalId: tr_asset_oid_workmate_asset_hierarchy + - externalId: tr_asset_oid_workmate_asset_hierarchy_example TransformationSchedule: - - externalId: tr_asset_oid_workmate_asset_hierarchy + - externalId: tr_asset_oid_workmate_asset_hierarchy_example From a9a26e7ce594f5ace0a03356ed57759febc4d930 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 16:31:17 +0100 Subject: [PATCH 87/90] fix (#230) * fix * test regen --- cognite_toolkit/cdf_tk/load.py | 2 +- ...oid_workmate_asset_hierarchy_example.schedule.yaml} | 2 +- ... 
tr_asset_oid_workmate_asset_hierarchy_example.sql} | 0 ...tr_asset_oid_workmate_asset_hierarchy_example.yaml} | 4 ++-- .../cdf_oid_example_data.yaml | 10 +++++----- .../cdf_oid_example_data.yaml | 4 ++-- 6 files changed, 11 insertions(+), 11 deletions(-) rename cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/{tr_asset_oid_workmate_asset_hierarchy.schedule.yaml => tr_asset_oid_workmate_asset_hierarchy_example.schedule.yaml} (85%) rename cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/{tr_asset_oid_workmate_asset_hierarchy.sql => tr_asset_oid_workmate_asset_hierarchy_example.sql} (100%) rename cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/{tr_asset_oid_workmate_asset_hierarchy.yaml => tr_asset_oid_workmate_asset_hierarchy_example.yaml} (96%) diff --git a/cognite_toolkit/cdf_tk/load.py b/cognite_toolkit/cdf_tk/load.py index dc581d893..ed1434275 100644 --- a/cognite_toolkit/cdf_tk/load.py +++ b/cognite_toolkit/cdf_tk/load.py @@ -794,7 +794,7 @@ def delete(self, ids: Sequence[str], drop_data: bool) -> int: except CogniteNotFoundError as e: return len(ids) - len(e.not_found) - def create(self, items: Sequence[Transformation], drop: bool, filepath: Path) -> TransformationList: + def create(self, items: Sequence[TransformationSchedule], drop: bool, filepath: Path) -> TransformationScheduleList: return self.client.transformations.schedules.create(items) diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.schedule.yaml similarity index 85% rename from cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.schedule.yaml index 4397716cb..340de856d 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.schedule.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.schedule.yaml @@ -1,3 +1,3 @@ -externalId: tr_asset_{{default_location}}_{{source_asset}}_asset_hierarchy +externalId: tr_asset_{{default_location}}_{{source_asset}}_asset_hierarchy_example interval: '{{scheduleHourly}}' isPaused: false \ No newline at end of file diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.sql b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.sql similarity index 100% rename from cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.sql rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.sql diff --git a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.yaml similarity index 96% rename from 
cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml rename to cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.yaml index 7e5e298e7..d7fa34c4f 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_oid_example_data/transformations/tr_asset_oid_workmate_asset_hierarchy_example.yaml @@ -1,5 +1,5 @@ -externalId: tr_asset_{{default_location}}_{{source_asset}}_asset_hierarchy -name: asset:{{default_location}}:{{source_asset}}:asset_hierarchy +externalId: tr_asset_{{default_location}}_{{source_asset}}_asset_hierarchy_example +name: asset:{{default_location}}:{{source_asset}}:asset_hierarchy:example destination: type: "asset_hierarchy" ignoreNullFields: true diff --git a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml index 7f11ef749..6f407f561 100644 --- a/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml +++ b/tests/test_approval_modules_snapshots/cdf_oid_example_data.yaml @@ -669,10 +669,10 @@ Transformation: clientSecret: ${IDP_CLIENT_SECRET} scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} - externalId: tr_asset_oid_workmate_asset_hierarchy + externalId: tr_asset_oid_workmate_asset_hierarchy_example ignoreNullFields: true isPublic: true - name: asset:oid:workmate:asset_hierarchy + name: asset:oid:workmate:asset_hierarchy:example ownerIsCurrentUser: true query: "select\n cast(`externalId` as STRING) as externalId,\n cast(`externalId`\ \ as STRING) as name,\n cast(`description` as STRING) as description,\n cast(`sourceDb`\ @@ -686,7 +686,7 @@ Transformation: scopes: ${IDP_SCOPES} tokenUri: ${IDP_TOKEN_URL} TransformationSchedule: -- externalId: tr_asset_oid_workmate_asset_hierarchy +- externalId: tr_asset_oid_workmate_asset_hierarchy_example interval: 7 * * * * isPaused: false deleted: @@ -739,6 +739,6 @@ deleted: - externalId: pi_163657 - externalId: pi_191092 Transformation: - - externalId: tr_asset_oid_workmate_asset_hierarchy + - externalId: tr_asset_oid_workmate_asset_hierarchy_example TransformationSchedule: - - externalId: tr_asset_oid_workmate_asset_hierarchy + - externalId: tr_asset_oid_workmate_asset_hierarchy_example diff --git a/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml b/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml index e57ea2f90..b4b834eee 100644 --- a/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml +++ b/tests/test_approval_modules_snapshots_clean/cdf_oid_example_data.yaml @@ -48,6 +48,6 @@ deleted: - externalId: pi_163657 - externalId: pi_191092 Transformation: - - externalId: tr_asset_oid_workmate_asset_hierarchy + - externalId: tr_asset_oid_workmate_asset_hierarchy_example TransformationSchedule: - - externalId: tr_asset_oid_workmate_asset_hierarchy + - externalId: tr_asset_oid_workmate_asset_hierarchy_example From 852df59c7e19cc44b5d14e68da3e8443587349bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 16:38:23 +0100 Subject: [PATCH 88/90] demofix --- demo/postproc.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/demo/postproc.py b/demo/postproc.py index 475a58d92..3d316247a 100755 --- a/demo/postproc.py +++ b/demo/postproc.py @@ -20,8 +20,10 @@ def run() -> None: 
print("Doing post-processing activities for demo project...") ToolGlobals = CDFToolConfig() try: - print("Running tr_asset_oid_workmate_asset_hierarchy...") - ToolGlobals.client.transformations.run(transformation_external_id="tr_asset_oid_workmate_asset_hierarchy") + print("Running tr_asset_oid_workmate_asset_hierarchy_example...") + ToolGlobals.client.transformations.run( + transformation_external_id="tr_asset_oid_workmate_asset_hierarchy_example" + ) print("Running tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities...") ToolGlobals.client.transformations.run( transformation_external_id="tr_workorder_oid_workmate_infield_sync_workorders_to_apm_activities" From 22472cabda43ccf3264679b00e3128d48c50c2cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 16:59:50 +0100 Subject: [PATCH 89/90] fix (#234) --- .../tr_asset_oid_workmate_asset_hierarchy.yaml | 2 +- demo/postproc.py | 6 ------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml index 8449e7086..730670f01 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml @@ -1,5 +1,5 @@ externalId: 'tr_asset_{{location_name}}_{{source_name}}_asset_hierarchy' -dataSetExternalId: 'ds_asset_{{location_name}}' +dataSetExternalId: 'ds_transformation_{{location_name}}' name: 'asset:{{location_name}}:{{source_name}}:asset_hierarchy' destination: type: "asset_hierarchy" diff --git a/demo/postproc.py b/demo/postproc.py index 3d316247a..b14f1ea41 100755 --- a/demo/postproc.py +++ b/demo/postproc.py @@ -34,12 +34,6 @@ def run() -> None: ToolGlobals.client.transformations.run( transformation_external_id="tr_asset_oid_workmate_infield_sync_assets_from_hierarchy_to_apm" ) - # Wait until assets are in data models - time.sleep(10.0) - print("Running tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm...") - ToolGlobals.client.transformations.run( - transformation_external_id="tr_asset_oid_workmate_infield_sync_asset_parents_from_hierarchy_to_apm" - ) except Exception as e: log.error(f"Failed to run post-processing activities for demo project:\n{e}") exit(1) From 1d42ba32384bf4a10c550ad3a3965f37b5ed7637 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?P=C3=A5l=20R=C3=B8nning?= Date: Fri, 15 Dec 2023 17:04:29 +0100 Subject: [PATCH 90/90] Conflict resolution (#236) * fix * s --- .../transformations/tr_asset_oid_workmate_asset_hierarchy.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml index 730670f01..c6c2e107c 100644 --- a/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml +++ b/cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_asset_valhall/transformations/tr_asset_oid_workmate_asset_hierarchy.yaml @@ -1,5 +1,5 @@ externalId: 
'tr_asset_{{location_name}}_{{source_name}}_asset_hierarchy' -dataSetExternalId: 'ds_transformation_{{location_name}}' +dataSetExternalId: 'ds_transformations_{{location_name}}' name: 'asset:{{location_name}}:{{source_name}}:asset_hierarchy' destination: type: "asset_hierarchy"
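
A note on the changelog check adjusted in PATCH 84: _parse_changelog in tests/test_build.py only recognizes release headers that match its regex, and the pre-release group was widened from (a\d+)? to ([ab]\d+)? so beta versions such as 0.1.0b1 are accepted alongside alphas such as 0.1.0a3. The following is a minimal sketch of that behavior, assuming only the Python standard library; the header strings are taken from the changelog entries in this series:

    import re

    # Regex from tests/test_build.py after PATCH 84: "(a\d+)?" became "([ab]\d+)?",
    # so beta pre-releases (0.1.0b1) match alongside alphas (0.1.0a3).
    CHANGELOG_HEADER = re.compile(r"##\s\[(\d+\.\d+\.\d+([ab]\d+)?)\]\s-\s(\d+-\d+-\d+)")

    for header in (
        "## [0.1.0a3] - 2023-11-29",  # alpha: matched both before and after the change
        "## [0.1.0b1] - 2023-12-15",  # beta: matched only after the change
        "## [TBD] - 2023-12-TBD",     # placeholder: never matched, by design
    ):
        match = CHANGELOG_HEADER.search(header)
        print(header, "->", match.group(1) if match else "no match")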
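
The annotation fix repeated in PATCH 85 and PATCH 87 corrects the schedule loader's create method, whose signature previously named Transformation and TransformationList instead of the schedule classes. Below is a hedged sketch of the corrected method in isolation; the TransformationScheduleLoader scaffold is illustrative rather than the real loader in cognite_toolkit/cdf_tk/load.py, and it assumes the cognite-sdk data classes TransformationSchedule and TransformationScheduleList:

    from pathlib import Path
    from typing import Sequence

    from cognite.client import CogniteClient
    from cognite.client.data_classes import (
        TransformationSchedule,
        TransformationScheduleList,
    )

    class TransformationScheduleLoader:
        # Illustrative scaffold only; the real loader carries more state and methods.
        def __init__(self, client: CogniteClient) -> None:
            self.client = client

        def create(
            self, items: Sequence[TransformationSchedule], drop: bool, filepath: Path
        ) -> TransformationScheduleList:
            # The patch changes only the annotations; the API call is unchanged.
            # drop and filepath are part of the loader interface and unused here.
            return self.client.transformations.schedules.create(items)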