diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d10f04781..beaec9050 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -26,7 +26,7 @@ repos:
     #- id: trailing-whitespace
   - repo: https://github.com/igorshubovych/markdownlint-cli
-    rev: v0.40.0
+    rev: v0.41.0
    hooks:
      - id: markdownlint
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 74db6d113..c89e60501 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -104,6 +104,20 @@
             "program": "./cognite_toolkit/cognite_modules/examples/cdf_data_pipeline_files_valhall/functions/fn_workflow_files_oid_fileshare_annotation/handler.py",
             "console": "integratedTerminal",
             "justMyCode": false
-        }
+        },
+        {
+            "name": "Python: init",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "./cdf-tk-dev.py",
+            "args": [
+                "init",
+                //"--dry-run",
+                //"--env=local",
+                //"--include=transformations"
+            ],
+            "console": "integratedTerminal",
+            "justMyCode": false
+        },
    ]
}
\ No newline at end of file
diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md
index f6d9155a0..d73cf4d45 100644
--- a/CHANGELOG.cdf-tk.md
+++ b/CHANGELOG.cdf-tk.md
@@ -15,6 +15,28 @@ Changes are grouped as follows:
 - `Fixed` for any bug fixes.
 - `Security` in case of vulnerabilities.
 
+## [0.2.0b1] - 2024-05-20
+
+### Added
+
+- Support for loading `nodes` with `APICall` arguments. The typical use case is when `node types` are part of a
+  data model and the default `APICall` arguments work well.
+
+### Fixed
+
+- The error message printed to the console on a failed `cdf-tk deploy` command could be unintentionally altered. This is now fixed.
+- The error message on a failed `cdf-tk deploy` or `cdf-tk clean` command now uses the display name instead of the folder name. For example,
+  if a `datapoints subscription` failed, the error message used to be `Failure to load/deploy timeseries as expected`;
+  now it is `Failure to load/deploy timeseries.subscription as expected`.
+- All resource types now have unique display names.
+- Fixed a bug when deploying an extraction pipeline config when none existed previously:
+  `There is no config stored for the extraction pipeline`.
+
+### Changed
+
+- In `config.[env].yaml`, in the `environment` section, `selected_modules_and_packages` is renamed to `selected`.
+  The old name still works, but triggers a deprecation warning.
+
 ## [0.2.0a5] - 2024-05-28
 
 ### Added
diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md
index cac39f2fa..88861d2bc 100644
--- a/CHANGELOG.templates.md
+++ b/CHANGELOG.templates.md
@@ -15,6 +15,13 @@ Changes are grouped as follows:
 - `Fixed` for any bug fixes.
 - `Security` in case of vulnerabilities.
 
+## [0.2.0b1] - 2024-05-20
+
+### Fixed
+
+- Removed illegal characters from the `DatapointSubscription` description in
+  `cognite_modules/examples/my_example_module`.
+
 ## [0.2.0a5] - 2024-05-28
 
 ### Added
diff --git a/README.md b/README.md
index 07fb1316b..bbde1becf 100644
--- a/README.md
+++ b/README.md
@@ -16,6 +16,16 @@ It supports three different modes of operation:
   bundled with templates useful for getting started with Cognite Data Fusion, as well as
   for specific use cases delivered by Cognite or its partners. You can also create your
   own templates and share them.
 
+## Usage
+
+Install the Toolkit by running:
+
+```bash
+pip install cognite-toolkit
+```
+
+Then run `cdf-tk --help` to get started with the interactive command-line tool.
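+
+For example, an illustrative first session might look like this (a sketch; `my_project` is an
+arbitrary directory name, and each command documents its full set of options under `--help`):
+
+```bash
+cdf-tk init my_project   # scaffold a project directory from the bundled templates
+cdf-tk build             # build the selected modules into a deployable form
+cdf-tk deploy --dry-run  # preview what would be deployed to Cognite Data Fusion
+```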
+
 ## For more information
 
 More details about the tool can be found at
diff --git a/cdf-tk-dev.py b/cdf-tk-dev.py
index 4da9748ad..c17e3fc85 100755
--- a/cdf-tk-dev.py
+++ b/cdf-tk-dev.py
@@ -46,7 +46,7 @@
         name="local",
         project="",
         build_type="dev",
-        selected_modules_and_packages=["cdf_demo_infield", "cdf_oid_example_data"],
+        selected=["cdf_demo_infield", "cdf_oid_example_data"],
     )
 ).load_defaults(REPO_ROOT / "cognite_toolkit")
 (REPO_ROOT / "cognite_toolkit" / "config.local.yaml").write_text(config_init.dump_yaml_with_comments())
diff --git a/cognite_toolkit/_api/data_classes.py b/cognite_toolkit/_api/data_classes.py
index f9dd9e600..b057694f8 100644
--- a/cognite_toolkit/_api/data_classes.py
+++ b/cognite_toolkit/_api/data_classes.py
@@ -21,7 +21,7 @@
     name="not used",
     project="not used",
     build_type="not used",
-    selected_modules_and_packages=[],
+    selected=[],
 )
diff --git a/cognite_toolkit/_api/modules_api.py b/cognite_toolkit/_api/modules_api.py
index 0ed5aeaa4..57f292676 100644
--- a/cognite_toolkit/_api/modules_api.py
+++ b/cognite_toolkit/_api/modules_api.py
@@ -78,7 +78,7 @@ def _build(self, modules: Sequence[ModuleMeta], verbose: bool) -> None:
                 name=self._build_env,
                 project=self._project_name,
                 build_type=self._build_env,
-                selected_modules_and_packages=[module.name for module in modules],
+                selected=[module.name for module in modules],
             ),
             filepath=Path(""),
             variables=variables,
diff --git a/cognite_toolkit/_cdf.py b/cognite_toolkit/_cdf.py
index 4cc82edfb..451e3e6b1 100755
--- a/cognite_toolkit/_cdf.py
+++ b/cognite_toolkit/_cdf.py
@@ -31,6 +31,7 @@
     NodeLoader,
     TransformationLoader,
 )
+from cognite_toolkit._cdf_tk.prototypes import featureflag
 from cognite_toolkit._cdf_tk.templates import (
     COGNITE_MODULES,
 )
@@ -89,6 +90,10 @@ def app() -> NoReturn:
     # --- Main entry point ---
     # Users run 'app()' directly, but that doesn't allow us to control exception handling:
     try:
+        if featureflag.enabled("FF_INTERACTIVE_INIT"):
+            from cognite_toolkit._cdf_tk.prototypes.interactive_init import InteractiveInit
+
+            _app.command("init")(InteractiveInit().interactive)
         _app()
     except ToolkitError as err:
         print(f"  [bold red]ERROR ([/][red]{type(err).__name__}[/][bold red]):[/] {err}")
@@ -453,7 +458,7 @@ def auth_verify(
         raise ToolkitValidationError("Failure to verify access rights.")
 
 
-@_app.command("init")
+@_app.command("init" if not featureflag.enabled("FF_INTERACTIVE_INIT") else "_init")
 def main_init(
     ctx: typer.Context,
     dry_run: Annotated[
diff --git a/cognite_toolkit/_cdf_tk/_parameters/data_classes.py b/cognite_toolkit/_cdf_tk/_parameters/data_classes.py
index 78f39857d..91ace9359 100644
--- a/cognite_toolkit/_cdf_tk/_parameters/data_classes.py
+++ b/cognite_toolkit/_cdf_tk/_parameters/data_classes.py
@@ -173,9 +173,10 @@ def __sub__(self, other: AbstractSet) -> ParameterSet[T_Parameter]:
 
 
 class ParameterSpecSet(ParameterSet[ParameterSpec]):
-    def __init__(self, iterable: Iterable[ParameterSpec] = ()) -> None:
+    def __init__(self, iterable: Iterable[ParameterSpec] = (), spec_name: str | None = None) -> None:
         super().__init__(iterable)
         self.is_complete = True
+        self.spec_name = spec_name
 
     def required(self, level: int | None = None) -> ParameterSet[ParameterSpec]:
         if level is None:
diff --git a/cognite_toolkit/_cdf_tk/commands/build.py b/cognite_toolkit/_cdf_tk/commands/build.py
index 86cb9acb7..05f69df12 100644
--- a/cognite_toolkit/_cdf_tk/commands/build.py
+++ b/cognite_toolkit/_cdf_tk/commands/build.py
@@ -134,12 +134,12 @@ def build_config(
         if duplicate_modules := {
             module_name: paths
             for
module_name, paths in module_parts_by_name.items() - if len(paths) > 1 and module_name in config.environment.selected_modules_and_packages + if len(paths) > 1 and module_name in config.environment.selected }: raise ToolkitDuplicatedModuleError( f"Ambiguous module selected in config.{config.environment.name}.yaml:", duplicate_modules ) - system_config.validate_modules(available_modules, config.environment.selected_modules_and_packages) + system_config.validate_modules(available_modules, config.environment.selected) selected_modules = config.get_selected_modules(system_config.packages, available_modules, verbose) diff --git a/cognite_toolkit/_cdf_tk/commands/clean.py b/cognite_toolkit/_cdf_tk/commands/clean.py index 8ef6e22bc..573a8f8ad 100644 --- a/cognite_toolkit/_cdf_tk/commands/clean.py +++ b/cognite_toolkit/_cdf_tk/commands/clean.py @@ -282,7 +282,7 @@ def execute( print(results.counts_table()) if results and results.has_uploads: print(results.uploads_table()) - raise ToolkitCleanResourceError(f"Failure to clean {loader_cls.folder_name} as expected.") + raise ToolkitCleanResourceError(f"Failure to clean {loader.display_name} as expected.") if results.has_counts: print(results.counts_table()) if results.has_uploads: diff --git a/cognite_toolkit/_cdf_tk/commands/deploy.py b/cognite_toolkit/_cdf_tk/commands/deploy.py index 24ca0ddb6..d84b8d669 100644 --- a/cognite_toolkit/_cdf_tk/commands/deploy.py +++ b/cognite_toolkit/_cdf_tk/commands/deploy.py @@ -149,8 +149,9 @@ def execute( if drop or drop_data: print(Panel("[bold]DEPLOYING resources...[/]")) for loader_cls in ordered_loaders: + loader_instance = loader_cls.create_loader(ToolGlobals, build_dir) result = self.deploy_resources( - loader_cls.create_loader(ToolGlobals, build_dir), + loader_instance, ToolGlobals=ToolGlobals, dry_run=dry_run, has_done_drop=drop, @@ -162,7 +163,7 @@ def execute( print(results.counts_table()) if results and results.has_uploads: print(results.uploads_table()) - raise ToolkitDeployResourceError(f"Failure to load/deploy {loader_cls.folder_name} as expected.") + raise ToolkitDeployResourceError(f"Failure to load/deploy {loader_instance.display_name} as expected.") if result: results[result.name] = result if ctx.obj.verbose: @@ -366,7 +367,8 @@ def _create_resources(self, resources: T_CogniteResourceList, loader: ResourceLo if e.code == 409: self.warn(LowSeverityWarning("Resource(s) already exist(s), skipping creation.")) else: - print(f"[bold red]ERROR:[/] Failed to create resource(s).\n{e}") + print("[bold red]ERROR:[/] Failed to create resource(s).\n") + print(e) return None except CogniteDuplicatedError as e: self.warn( diff --git a/cognite_toolkit/_cdf_tk/commands/pull.py b/cognite_toolkit/_cdf_tk/commands/pull.py index d0eee0b87..3f302b97a 100644 --- a/cognite_toolkit/_cdf_tk/commands/pull.py +++ b/cognite_toolkit/_cdf_tk/commands/pull.py @@ -388,7 +388,7 @@ def pull_command( system_config = SystemYAML.load_from_directory(source_path, env) config = BuildConfigYAML.load_from_directory(source_path, env) config.set_environment_variables() - config.environment.selected_modules_and_packages = config.available_modules + config.environment.selected = config.available_modules print( Panel.fit( f"[bold]Building all modules found in {config.filepath} (not only the modules under " diff --git a/cognite_toolkit/_cdf_tk/commands/run.py b/cognite_toolkit/_cdf_tk/commands/run.py index 0d0a8667b..e541837b9 100644 --- a/cognite_toolkit/_cdf_tk/commands/run.py +++ b/cognite_toolkit/_cdf_tk/commands/run.py @@ -192,7 +192,7 @@ 
def run_local_function( continue for path in function_dir.iterdir(): if path.is_dir() and path.name == external_id: - config.environment.selected_modules_and_packages = [module_from_path(path)] + config.environment.selected = [module_from_path(path)] found = True break diff --git a/cognite_toolkit/_cdf_tk/load/_resource_loaders.py b/cognite_toolkit/_cdf_tk/load/_resource_loaders.py index ada1ec045..1442c8df3 100644 --- a/cognite_toolkit/_cdf_tk/load/_resource_loaders.py +++ b/cognite_toolkit/_cdf_tk/load/_resource_loaders.py @@ -108,6 +108,7 @@ DataModelApplyList, DataModelList, Node, + NodeApply, NodeApplyResultList, NodeList, Space, @@ -167,7 +168,7 @@ ) from ._base_loaders import ResourceContainerLoader, ResourceLoader -from .data_classes import LoadedNode, LoadedNodeList, RawDatabaseTable, RawTableList +from .data_classes import NodeApplyListWithCall, RawDatabaseTable, RawTableList _MIN_TIMESTAMP_MS = -2208988800000 # 1900-01-01 00:00:00.000 _MAX_TIMESTAMP_MS = 4102444799999 # 2099-12-31 23:59:59.999 @@ -823,6 +824,10 @@ class FunctionScheduleLoader( dependencies = frozenset({FunctionLoader}) _doc_url = "Function-schedules/operation/postFunctionSchedules" + @property + def display_name(self) -> str: + return "function.schedules" + @classmethod def get_required_capability(cls, items: FunctionScheduleWriteList) -> list[Capability]: return [ @@ -955,6 +960,10 @@ def __init__(self, client: CogniteClient, build_dir: Path): super().__init__(client, build_dir) self._loaded_db_names: set[str] = set() + @property + def display_name(self) -> str: + return "raw.databases" + @classmethod def get_required_capability(cls, items: RawTableList) -> Capability: tables_by_database = defaultdict(list) @@ -1057,6 +1066,10 @@ def __init__(self, client: CogniteClient, build_dir: Path): super().__init__(client, build_dir) self._printed_warning = False + @property + def display_name(self) -> str: + return "raw.tables" + @classmethod def get_required_capability(cls, items: RawTableList) -> Capability: tables_by_database = defaultdict(list) @@ -1632,6 +1645,10 @@ class TransformationScheduleLoader( dependencies = frozenset({TransformationLoader}) _doc_url = "Transformation-Schedules/operation/createTransformationSchedules" + @property + def display_name(self) -> str: + return "transformation.schedules" + @classmethod def get_required_capability(cls, items: TransformationScheduleWriteList) -> list[Capability]: # Access for transformations schedules is checked by the transformation that is deployed @@ -1846,6 +1863,10 @@ class ExtractionPipelineConfigLoader( dependencies = frozenset({ExtractionPipelineLoader}) _doc_url = "Extraction-Pipelines-Config/operation/createExtPipeConfig" + @property + def display_name(self) -> str: + return "extraction_pipeline.config" + @classmethod def get_required_capability(cls, items: ExtractionPipelineConfigWriteList) -> list[Capability]: # Access for extraction pipeline configs is checked by the extraction pipeline that is deployed @@ -1892,7 +1913,10 @@ def _upsert(self, items: ExtractionPipelineConfigWriteList) -> ExtractionPipelin for item in items: if not item.external_id: raise ToolkitRequiredValueError("ExtractionPipelineConfig must have external_id set.") - latest = self.client.extraction_pipelines.config.retrieve(item.external_id) + try: + latest = self.client.extraction_pipelines.config.retrieve(item.external_id) + except CogniteAPIError: + latest = None if latest and self.are_equal(item, latest): updated.append(latest) continue @@ -2250,10 +2274,12 @@ class 
ContainerLoader(
     list_cls = ContainerList
     list_write_cls = ContainerApplyList
     dependencies = frozenset({SpaceLoader})
-
-    _display_name = "containers"
     _doc_url = "Containers/operation/ApplyContainers"
+
+    @property
+    def display_name(self) -> str:
+        return "containers"
+
     @classmethod
     def get_required_capability(cls, items: ContainerApplyList) -> Capability:
         return DataModelsAcl(
@@ -2426,8 +2452,6 @@ class ViewLoader(ResourceLoader[ViewId, ViewApply, View, ViewApplyList, ViewList
     list_cls = ViewList
     list_write_cls = ViewApplyList
     dependencies = frozenset({SpaceLoader, ContainerLoader})
-
-    _display_name = "views"
     _doc_url = "Views/operation/ApplyViews"
 
     def __init__(self, client: CogniteClient, build_dir: Path) -> None:
@@ -2435,6 +2459,10 @@ def __init__(self, client: CogniteClient, build_dir: Path) -> None:
         # Caching to avoid multiple lookups on the same interfaces.
         self._interfaces_by_id: dict[ViewId, View] = {}
 
+    @property
+    def display_name(self) -> str:
+        return "views"
+
     @classmethod
     def get_required_capability(cls, items: ViewApplyList) -> Capability:
         return DataModelsAcl(
@@ -2661,14 +2689,14 @@ def get_write_cls_parameter_spec(cls) -> ParameterSpecSet:
 
 
 @final
-class NodeLoader(ResourceContainerLoader[NodeId, LoadedNode, Node, LoadedNodeList, NodeList]):
+class NodeLoader(ResourceContainerLoader[NodeId, NodeApply, Node, NodeApplyListWithCall, NodeList]):
     item_name = "nodes"
     folder_name = "data_models"
     filename_pattern = r"^.*\.?(node)$"
     resource_cls = Node
-    resource_write_cls = LoadedNode
+    resource_write_cls = NodeApply
     list_cls = NodeList
-    list_write_cls = LoadedNodeList
+    list_write_cls = NodeApplyListWithCall
     dependencies = frozenset({SpaceLoader, ViewLoader, ContainerLoader})
     _doc_url = "Instances/operation/applyNodeAndEdges"
 
@@ -2677,14 +2705,14 @@ def display_name(self) -> str:
         return "nodes"
 
     @classmethod
-    def get_required_capability(cls, items: LoadedNodeList) -> Capability:
+    def get_required_capability(cls, items: NodeApplyListWithCall) -> Capability:
         return DataModelInstancesAcl(
             [DataModelInstancesAcl.Action.Read, DataModelInstancesAcl.Action.Write],
-            DataModelInstancesAcl.Scope.SpaceID(list({item.node.space for item in items})),
+            DataModelInstancesAcl.Scope.SpaceID(list({item.space for item in items})),
         )
 
     @classmethod
-    def get_id(cls, item: LoadedNode | Node | dict) -> NodeId:
+    def get_id(cls, item: NodeApply | Node | dict) -> NodeId:
         if isinstance(item, dict):
             if missing := tuple(k for k in {"space", "externalId"} if k not in item):
                 # We need to raise a KeyError with all missing keys to get the correct error message.
@@ -2703,17 +2731,20 @@ def get_dependent_items(cls, item: dict) -> Iterable[tuple[type[ResourceLoader],
         elif identifier.get("type") == "container" and _in_dict(("space", "externalId"), identifier):
             yield ContainerLoader, ContainerId(identifier["space"], identifier["externalId"])
 
-    def are_equal(self, local: LoadedNode, cdf_resource: Node) -> bool:
+    @classmethod
+    def create_empty_of(cls, items: NodeApplyListWithCall) -> NodeApplyListWithCall:
+        return NodeApplyListWithCall([], items.api_call)
+
+    def are_equal(self, local: NodeApply, cdf_resource: Node) -> bool:
         """Comparison for nodes that includes the node properties in the comparison.
 
        Note this is an expensive operation, as we do an extra retrieve to fetch the properties.
        Thus, the cdf-tk should not be used to upload data nodes, only nodes used for configuration.
        """
-        local_node = local.node
         # Note reading from a container is not supported.
sources = [ source_prop_pair.source - for source_prop_pair in local_node.sources or [] + for source_prop_pair in local.sources or [] if isinstance(source_prop_pair.source, ViewId) ] try: @@ -2724,7 +2755,7 @@ def are_equal(self, local: LoadedNode, cdf_resource: Node) -> bool: # View does not exist, so node does not exist. return False cdf_resource_dumped = cdf_resource_with_properties.as_write().dump() - local_dumped = local_node.dump() + local_dumped = local.dump() if "existingVersion" not in local_dumped: # Existing version is typically not set when creating nodes, but we get it back # when we retrieve the node from the server. @@ -2732,21 +2763,18 @@ def are_equal(self, local: LoadedNode, cdf_resource: Node) -> bool: return local_dumped == cdf_resource_dumped - def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, skip_validation: bool) -> LoadedNodeList: + def load_resource(self, filepath: Path, ToolGlobals: CDFToolConfig, skip_validation: bool) -> NodeApplyListWithCall: raw = load_yaml_inject_variables(filepath, ToolGlobals.environment_variables()) - if isinstance(raw, dict): - loaded = LoadedNodeList._load(raw, cognite_client=self.client) - else: - raise ValueError(f"Unexpected node yaml file format {filepath.name}") + loaded = NodeApplyListWithCall._load(raw, cognite_client=self.client) if not skip_validation: - ToolGlobals.verify_spaces(list({item.node.space for item in loaded})) + ToolGlobals.verify_spaces(list({item.space for item in loaded})) return loaded def dump_resource( - self, resource: LoadedNode, source_file: Path, local_resource: LoadedNode + self, resource: NodeApply, source_file: Path, local_resource: NodeApply ) -> tuple[dict[str, Any], dict[Path, str]]: - resource_node = resource.node - local_node = local_resource.node + resource_node = resource + local_node = local_resource # Retrieve node again to get properties. 
        view_ids = {source.source for source in local_node.sources or [] if isinstance(source.source, ViewId)}
        nodes = self.client.data_modeling.instances.retrieve(nodes=local_node.as_id(), sources=list(view_ids)).nodes
@@ -2766,26 +2794,18 @@ def dump_resource(
 
         return dumped, {}
 
-    def create(self, items: LoadedNodeList) -> NodeApplyResultList:
-        if not isinstance(items, LoadedNodeList):
+    def create(self, items: NodeApplyListWithCall) -> NodeApplyResultList:
+        if not isinstance(items, NodeApplyListWithCall):
             raise ValueError("Unexpected node file format")
-        results = NodeApplyResultList([])
-        for api_call, item in itertools.groupby(sorted(items, key=lambda x: x.api_call), key=lambda x: x.api_call):
-            nodes = [node.node for node in item]
-            result = self.client.data_modeling.instances.apply(
-                nodes=nodes,
-                auto_create_direct_relations=api_call.auto_create_direct_relations,
-                skip_on_version_conflict=api_call.skip_on_version_conflict,
-                replace=api_call.replace,
-            )
-            results.extend(result.nodes)
-        return results
+        api_call_args = items.api_call.dump(camel_case=False) if items.api_call else {}
+        result = self.client.data_modeling.instances.apply(nodes=items, **api_call_args)
+        return result.nodes
 
     def retrieve(self, ids: SequenceNotStr[NodeId]) -> NodeList:
         return self.client.data_modeling.instances.retrieve(nodes=cast(Sequence, ids)).nodes
 
-    def update(self, items: LoadedNodeList) -> NodeApplyResultList:
+    def update(self, items: NodeApplyListWithCall) -> NodeApplyResultList:
         return self.create(items)
 
     def delete(self, ids: SequenceNotStr[NodeId]) -> int:
@@ -2807,29 +2827,18 @@ def drop_data(self, ids: SequenceNotStr[NodeId]) -> int:
 
     @classmethod
     @lru_cache(maxsize=1)
     def get_write_cls_parameter_spec(cls) -> ParameterSpecSet:
-        spec = super().get_write_cls_parameter_spec()
-        # Modifications to match the spec
-        for item in spec:
-            if item.path[0] == "apiCall" and len(item.path) > 1:
-                # Move up one level
-                # The spec class is immutable, so we use this trick to modify it.
- object.__setattr__(item, "path", item.path[1:]) - elif item.path[0] == "node": - # Move into list - object.__setattr__(item, "path", ("nodes", ANY_INT, *item.path[1:])) - # Top level of nodes - spec.add(ParameterSpec(("nodes",), frozenset({"list"}), is_required=True, _is_nullable=False)) - spec.add( + node_spec = super().get_write_cls_parameter_spec() + # This is a deviation between the SDK and the API + node_spec.add(ParameterSpec(("instanceType",), frozenset({"str"}), is_required=False, _is_nullable=False)) + node_spec.add( ParameterSpec( - ("nodes", ANY_INT, "sources", ANY_INT, "source", "type"), + ("sources", ANY_INT, "source", "type"), frozenset({"str"}), is_required=True, _is_nullable=False, ) ) - # Not used - spec.discard(ParameterSpec(("apiCall",), frozenset({"dict"}), is_required=True, _is_nullable=False)) - return spec + return ParameterSpecSet(node_spec, spec_name=cls.__name__) @final @@ -2912,6 +2921,10 @@ class WorkflowVersionLoader( _doc_base_url = "https://api-docs.cognite.com/20230101-beta/tag/" _doc_url = "Workflow-versions/operation/CreateOrUpdateWorkflowVersion" + @property + def display_name(self) -> str: + return "workflow.versions" + @classmethod def get_required_capability(cls, items: WorkflowVersionUpsertList) -> Capability: return WorkflowOrchestrationAcl( diff --git a/cognite_toolkit/_cdf_tk/load/data_classes.py b/cognite_toolkit/_cdf_tk/load/data_classes.py index 96d590012..4ee4647b3 100644 --- a/cognite_toolkit/_cdf_tk/load/data_classes.py +++ b/cognite_toolkit/_cdf_tk/load/data_classes.py @@ -17,19 +17,18 @@ from abc import ABC from collections import UserDict -from collections.abc import Iterable +from collections.abc import Collection, Iterable from dataclasses import dataclass from functools import total_ordering from typing import Any, Literal from cognite.client import CogniteClient from cognite.client.data_classes._base import ( - CogniteResource, CogniteResourceList, WriteableCogniteResource, WriteableCogniteResourceList, ) -from cognite.client.data_classes.data_modeling import NodeApply, NodeApplyList, NodeId +from cognite.client.data_classes.data_modeling import NodeApply, NodeApplyList from rich.table import Table @@ -94,47 +93,69 @@ def as_db_names(self) -> list[str]: @dataclass(frozen=True, order=True) class NodeAPICall: - auto_create_direct_relations: bool - skip_on_version_conflict: bool - replace: bool + auto_create_direct_relations: bool | None + skip_on_version_conflict: bool | None + replace: bool | None @classmethod def load(cls, resource: dict[str, Any]) -> NodeAPICall: return cls( - auto_create_direct_relations=resource["autoCreateDirectRelations"], - skip_on_version_conflict=resource["skipOnVersionConflict"], - replace=resource["replace"], + auto_create_direct_relations=resource.get("autoCreateDirectRelations"), + skip_on_version_conflict=resource.get("skipOnVersionConflict"), + replace=resource.get("replace"), ) def dump(self, camel_case: bool = True) -> dict[str, Any]: - return { - ( - "autoCreateDirectRelations" if camel_case else "auto_create_direct_relations" - ): self.auto_create_direct_relations, - "skipOnVersionConflict" if camel_case else "skip_on_version_conflict": self.skip_on_version_conflict, - "replace": self.replace, - } - - -@dataclass -class LoadedNode(CogniteResource): - api_call: NodeAPICall - node: NodeApply + output: dict[str, Any] = {} + if self.auto_create_direct_relations is not None: + output["autoCreateDirectRelations" if camel_case else "auto_create_direct_relations"] = ( + self.auto_create_direct_relations + ) 
+ if self.skip_on_version_conflict is not None: + output["skipOnVersionConflict" if camel_case else "skip_on_version_conflict"] = ( + self.skip_on_version_conflict + ) + if self.replace is not None: + output["replace"] = self.replace + return output - def as_id(self) -> NodeId: - return self.node.as_id() +class NodeApplyListWithCall(CogniteResourceList[NodeApply]): + _RESOURCE = NodeApply -class LoadedNodeList(CogniteResourceList[LoadedNode]): - _RESOURCE = LoadedNode + def __init__(self, resources: Collection[Any], api_call: NodeAPICall | None = None) -> None: + super().__init__(resources, cognite_client=None) + self.api_call = api_call @classmethod def _load( # type: ignore[override] - cls, resource: dict[str, Any], cognite_client: CogniteClient | None = None - ) -> LoadedNodeList: - api_call = NodeAPICall.load(resource) - nodes = NodeApplyList.load(resource["nodes"]) - return cls([LoadedNode(api_call, node) for node in nodes]) + cls, resource: dict[str, Any] | list[dict[str, Any]], cognite_client: CogniteClient | None = None + ) -> NodeApplyListWithCall: + api_call: NodeAPICall | None = None + if isinstance(resource, dict) and ("nodes" in resource or "node" in resource): + api_call = NodeAPICall.load(resource) + + if api_call and isinstance(resource, dict) and "nodes" in resource: + nodes = NodeApplyList.load(resource["nodes"]) + elif api_call and isinstance(resource, dict) and "node" in resource: + nodes = NodeApplyList([NodeApply.load(resource["node"])]) + elif isinstance(resource, list): + nodes = NodeApplyList.load(resource) + elif isinstance(resource, dict): + nodes = NodeApplyList([NodeApply.load(resource)]) + else: + raise ValueError("Invalid input for NodeApplyListWithCall") + return cls(nodes, api_call) + + def dump(self, camel_case: bool = True) -> dict[str, Any] | list[dict[str, Any]]: # type: ignore[override] + nodes = [resource.dump(camel_case) for resource in self.data] + if self.api_call is not None: + if len(nodes) == 1: + return {**self.api_call.dump(camel_case), "node": nodes[0]} + else: + return {**self.api_call.dump(camel_case), "nodes": nodes} + else: + return nodes @total_ordering diff --git a/cognite_toolkit/_cdf_tk/prototypes/featureflag.py b/cognite_toolkit/_cdf_tk/prototypes/featureflag.py new file mode 100644 index 000000000..7e8bea58a --- /dev/null +++ b/cognite_toolkit/_cdf_tk/prototypes/featureflag.py @@ -0,0 +1,22 @@ +import os +from functools import lru_cache + +import dotenv + + +@lru_cache(maxsize=128) +def enabled(flag: str) -> bool: + """ + Check if a feature flag is enabled. + + Args: + flag (str): The feature flag to check. + + Returns: + bool: True if the feature flag is enabled, False otherwise. 
+ """ + dotenv.load_dotenv() + if os.environ.get(flag, "false").lower() == "true": + print(f"Feature flag {flag} is enabled.") + return True + return False diff --git a/cognite_toolkit/_cdf_tk/prototypes/interactive_init.py b/cognite_toolkit/_cdf_tk/prototypes/interactive_init.py new file mode 100644 index 000000000..be50bc190 --- /dev/null +++ b/cognite_toolkit/_cdf_tk/prototypes/interactive_init.py @@ -0,0 +1,67 @@ +from typing import Annotated, Optional + +import typer + + +class InteractiveInit(typer.Typer): + def __init__(self, *args, **kwargs) -> None: # type: ignore + super().__init__(*args, **kwargs) + self.command()(self.interactive) + + def interactive( + self, + ctx: typer.Context, + dry_run: Annotated[ + bool, + typer.Option( + "--dry-run", + "-r", + help="Whether to do a dry-run, do dry-run if present.", + ), + ] = False, + upgrade: Annotated[ + bool, + typer.Option( + "--upgrade", + "-u", + help="Will upgrade templates in place without overwriting existing config.yaml and other files.", + ), + ] = False, + git_branch: Annotated[ + Optional[str], + typer.Option( + "--git", + "-g", + help="Will download the latest templates from the git repository branch specified. Use `main` to get the very latest templates.", + ), + ] = None, + no_backup: Annotated[ + bool, + typer.Option( + "--no-backup", + help="Will skip making a backup before upgrading.", + ), + ] = False, + clean: Annotated[ + bool, + typer.Option( + "--clean", + help="Will delete the new_project directory before starting.", + ), + ] = False, + init_dir: Annotated[ + str, + typer.Argument( + help="Directory path to project to initialize or upgrade with templates.", + ), + ] = "new_project", + ) -> None: + """Initialize or upgrade a new CDF project with templates interactively.""" + + print("Initializing or upgrading a new CDF project with templates interactively.") + typer.Exit() + + +command = InteractiveInit( + name="init", help="Initialize or upgrade a new CDF project with templates interactively." +).interactive diff --git a/cognite_toolkit/_cdf_tk/templates/_migration.yaml b/cognite_toolkit/_cdf_tk/templates/_migration.yaml index add6f9a74..d74090967 100644 --- a/cognite_toolkit/_cdf_tk/templates/_migration.yaml +++ b/cognite_toolkit/_cdf_tk/templates/_migration.yaml @@ -1,3 +1,12 @@ +- version: 0.2.0b1 + cognite_modules: {} + resources: {} + tool: + - title: "Parameter 'selected_modules_and_packages' is renamed to 'selected' in config..yaml" + steps: + - For each config, go into the YAML config file and rename 'selected_modules_and_packages' to + 'selected'. + cognite_modules_hash: "" - version: 0.2.0a5 cognite_modules: {} resources: {} @@ -7,7 +16,7 @@ - For each function, go into the YAML config file and rename 'externalIdDataSet' to 'dataSetExternalId'. This is a breaking change. - The motivation for this change is to make the naming consistent with the rest of the Toolkit. 
- cognite_modules_hash: "" + cognite_modules_hash: "54f7fd5e90879abac473263ea801f29178dfbecf948cbe011c003b06f11ec17f" - version: 0.2.0a4 cognite_modules: {} resources: {} diff --git a/cognite_toolkit/_cdf_tk/templates/data_classes/_config_yaml.py b/cognite_toolkit/_cdf_tk/templates/data_classes/_config_yaml.py index d3d576d22..89f74c5a3 100644 --- a/cognite_toolkit/_cdf_tk/templates/data_classes/_config_yaml.py +++ b/cognite_toolkit/_cdf_tk/templates/data_classes/_config_yaml.py @@ -40,26 +40,28 @@ class Environment: name: str project: str build_type: str - selected_modules_and_packages: list[str | tuple[str, ...]] + selected: list[str | tuple[str, ...]] @classmethod def load(cls, data: dict[str, Any], build_name: str) -> Environment: + _deprecation_selected(data) + try: return Environment( name=build_name, project=data["project"], build_type=data["type"], - selected_modules_and_packages=[ + selected=[ tuple([part for part in selected.split(MODULE_PATH_SEP) if part]) if MODULE_PATH_SEP in selected else selected - for selected in data["selected_modules_and_packages"] or [] + for selected in data["selected"] or [] ], ) except KeyError: raise ToolkitEnvError( "Environment section is missing one or more required fields: 'name', 'project', 'type', or " - f"'selected_modules_and_packages' in {BuildConfigYAML._file_name(build_name)!s}" + f"'selected' in {BuildConfigYAML._file_name(build_name)!s}" ) def dump(self) -> dict[str, Any]: @@ -67,9 +69,9 @@ def dump(self) -> dict[str, Any]: "name": self.name, "project": self.project, "type": self.build_type, - "selected_modules_and_packages": [ + "selected": [ MODULE_PATH_SEP.join(selected) if isinstance(selected, tuple) else selected - for selected in self.selected_modules_and_packages + for selected in self.selected ], } @@ -144,7 +146,7 @@ def create_build_environment(self, hash_by_source_file: dict[Path, str] | None = name=self.environment.name, # type: ignore[arg-type] project=self.environment.project, build_type=self.environment.build_type, - selected_modules_and_packages=self.environment.selected_modules_and_packages, + selected=self.environment.selected, cdf_toolkit_version=__version__, hash_by_source_file=hash_by_source_file or {}, ) @@ -157,7 +159,7 @@ def get_selected_modules( ) -> list[str | tuple[str, ...]]: selected_packages = [ package - for package in self.environment.selected_modules_and_packages + for package in self.environment.selected if package in modules_by_package and isinstance(package, str) ] if verbose: @@ -167,9 +169,7 @@ def get_selected_modules( for package in selected_packages: print(f" {package}") - selected_modules = [ - module for module in self.environment.selected_modules_and_packages if module not in modules_by_package - ] + selected_modules = [module for module in self.environment.selected if module not in modules_by_package] if missing := set(selected_modules) - available_modules: raise ToolkitMissingModuleError(f"The following selected modules are missing, please check path: {missing}") selected_modules.extend( @@ -202,12 +202,13 @@ def load( if build_name is None: raise ValueError("build_name must be specified") version = _load_version_variable(data, BUILD_ENVIRONMENT_FILE) + _deprecation_selected(data) try: return BuildEnvironment( name=build_name, project=data["project"], build_type=data["type"], - selected_modules_and_packages=data["selected_modules_and_packages"], + selected=data["selected"], cdf_toolkit_version=version, hash_by_source_file={Path(file): hash_ for file, hash_ in data.get("source_files", 
{}).items()},
             )
@@ -243,6 +244,15 @@ def check_source_files_changed(self) -> WarningList[FileReadWarning]:
         return warning_list
 
 
+def _deprecation_selected(data: dict[str, Any]) -> None:
+    if "selected_modules_and_packages" in data and "selected" not in data:
+        print(
+            "  [bold yellow]Warning:[/] In environment section: 'selected_modules_and_packages' "
+            "is deprecated, use 'selected' instead."
+        )
+        data["selected"] = data.pop("selected_modules_and_packages")
+
+
 @dataclass
 class ConfigEntry:
     """This represents a single entry in a config.yaml file.
diff --git a/cognite_toolkit/_cdf_tk/validation.py b/cognite_toolkit/_cdf_tk/validation.py
index 7b0602207..6529cc00d 100644
--- a/cognite_toolkit/_cdf_tk/validation.py
+++ b/cognite_toolkit/_cdf_tk/validation.py
@@ -9,6 +9,7 @@
 from cognite.client.utils._text import to_camel_case, to_snake_case
 
 from cognite_toolkit._cdf_tk._parameters import ParameterSpecSet, read_parameters_from_dict
+from cognite_toolkit._cdf_tk.load import NodeLoader
 from cognite_toolkit._cdf_tk.tk_warnings import (
     CaseTypoWarning,
     DataSetMissingWarning,
@@ -70,11 +71,28 @@ def validate_data_set_is_set(
 
 def validate_resource_yaml(
     data: dict | list, spec: ParameterSpecSet, source_file: Path, element: int | None = None
+) -> WarningList:
+    if spec.spec_name == NodeLoader.__name__:
+        # Special case for NodeLoader as it has options for API call parameters
+        if isinstance(data, list):
+            return _validate_resource_yaml(data, spec, source_file)
+        elif isinstance(data, dict) and "node" in data:
+            return _validate_resource_yaml(data["node"], spec, source_file)
+        elif isinstance(data, dict) and "nodes" in data:
+            return _validate_resource_yaml(data["nodes"], spec, source_file)
+        else:
+            return _validate_resource_yaml(data, spec, source_file)
+    else:
+        return _validate_resource_yaml(data, spec, source_file, element)
+
+
+def _validate_resource_yaml(
+    data: dict | list, spec: ParameterSpecSet, source_file: Path, element: int | None = None
 ) -> WarningList:
     warnings: WarningList = WarningList()
     if isinstance(data, list):
         for no, item in enumerate(data, 1):
-            warnings.extend(validate_resource_yaml(item, spec, source_file, no))
+            warnings.extend(_validate_resource_yaml(item, spec, source_file, no))
         return warnings
     elif not isinstance(data, dict):
         raise NotImplementedError("Note: This function only supports top-level dictionaries and lists of dictionaries.")
diff --git a/cognite_toolkit/_system.yaml b/cognite_toolkit/_system.yaml
index e3cfec617..13588a1e5 100644
--- a/cognite_toolkit/_system.yaml
+++ b/cognite_toolkit/_system.yaml
@@ -25,4 +25,4 @@ packages:
     - example_pump_data_model
 
 # This part is used by cdf-toolkit to keep track of the version and help you upgrade.
-cdf_toolkit_version: 0.2.0a5 +cdf_toolkit_version: 0.2.0b1 diff --git a/cognite_toolkit/_version.py b/cognite_toolkit/_version.py index 87835b4d3..b95abbc07 100644 --- a/cognite_toolkit/_version.py +++ b/cognite_toolkit/_version.py @@ -1 +1 @@ -__version__ = "0.2.0a5" +__version__ = "0.2.0b1" diff --git a/cognite_toolkit/cognite_modules/examples/my_example_module/timeseries/my_subscription.DatapointSubscription.yaml b/cognite_toolkit/cognite_modules/examples/my_example_module/timeseries/my_subscription.DatapointSubscription.yaml index cf23d06ce..7323c78fc 100644 --- a/cognite_toolkit/cognite_modules/examples/my_example_module/timeseries/my_subscription.DatapointSubscription.yaml +++ b/cognite_toolkit/cognite_modules/examples/my_example_module/timeseries/my_subscription.DatapointSubscription.yaml @@ -1,6 +1,6 @@ externalId: my_subscription name: My Subscription -description: All timeseries with externalId starting with 'ts_value' +description: All timeseries with externalId starting with ts_value partitionCount: 1 filter: prefix: diff --git a/poetry.lock b/poetry.lock index 24167f643..f934b4433 100644 --- a/poetry.lock +++ b/poetry.lock @@ -327,13 +327,13 @@ python-json-logger = ">=2.0.7,<3.0.0" [[package]] name = "cognite-sdk" -version = "7.43.5" +version = "7.44.0" description = "Cognite Python SDK" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "cognite_sdk-7.43.5-py3-none-any.whl", hash = "sha256:de572f2dad1880744f60bc7674f53d733411523a89d76a92da3972edfe203261"}, - {file = "cognite_sdk-7.43.5.tar.gz", hash = "sha256:12b1394e729062577ae0124f07e339de81a41210779e333f7c82c657262e24ff"}, + {file = "cognite_sdk-7.44.0-py3-none-any.whl", hash = "sha256:ddec530c7637d6ba5ca8a35ed23520eae2be23b89821ab8548c260c0c99ba56c"}, + {file = "cognite_sdk-7.44.0.tar.gz", hash = "sha256:13e6a9aed74efe4224002785e9b8d38d930f3da8b0ad1f7535d236968aa6dccb"}, ] [package.dependencies] @@ -1081,7 +1081,6 @@ optional = false python-versions = ">=3.9" files = [ {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, @@ -1102,7 +1101,6 @@ files = [ {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, {file = 
"pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, @@ -1569,7 +1567,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1577,16 +1574,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = 
"PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1603,7 +1592,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1611,7 +1599,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1983,4 +1970,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "66f0c042a4616c1d0ba06d06cd1436ce8460c630c979317e761f348a03cdb690" +content-hash = "22e5265d1cc0477f314a4794c8071cd4ab54d881dcbbb5ecf936da6dc98f52a7" diff --git a/pyproject.toml b/pyproject.toml index 07505f772..3385605b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "cognite_toolkit" -version = "0.2.0a5" +version = "0.2.0b1" description = "Official Cognite Data Fusion tool for project templates and configuration deployment" authors = ["Cognite AS "] license = "Apache-2" @@ -18,7 +18,7 @@ cognite-sdk = {version = "^7.43.5", extras = ["pandas"]} cognite-extractor-utils = ">=7" pandas = ">=1.5.3, <3.0" pyyaml = "^6.0.1" -typer = {version = "^0.12.0", extras = ["all"]} +typer = {version = ">=0.12.0, <1.0", extras = ["all"]} sentry-sdk = "^2.1.0" cognite-logger = "^0.6" diff --git a/tests/tests_unit/data/project_for_test/_system.yaml b/tests/tests_unit/data/project_for_test/_system.yaml index 
3123c8be5..951f51624 100644
--- a/tests/tests_unit/data/project_for_test/_system.yaml
+++ b/tests/tests_unit/data/project_for_test/_system.yaml
@@ -4,4 +4,4 @@ packages:
     - child_module
 
 # This part is used by cdf-toolkit to keep track of the version and help you upgrade.
-cdf_toolkit_version: 0.2.0a5
+cdf_toolkit_version: 0.2.0b1
diff --git a/tests/tests_unit/data/project_no_cognite_modules/_system.yaml b/tests/tests_unit/data/project_no_cognite_modules/_system.yaml
index a32326629..22e17a2c0 100644
--- a/tests/tests_unit/data/project_no_cognite_modules/_system.yaml
+++ b/tests/tests_unit/data/project_no_cognite_modules/_system.yaml
@@ -3,4 +3,4 @@
 packages: {}
 
 # This part is used by cdf-toolkit to keep track of the version and help you upgrade.
-cdf_toolkit_version: 0.2.0a5
+cdf_toolkit_version: 0.2.0b1
diff --git a/tests/tests_unit/data/run_data/_system.yaml b/tests/tests_unit/data/run_data/_system.yaml
index e3cfec617..13588a1e5 100644
--- a/tests/tests_unit/data/run_data/_system.yaml
+++ b/tests/tests_unit/data/run_data/_system.yaml
@@ -25,4 +25,4 @@ packages:
     - example_pump_data_model
 
 # This part is used by cdf-toolkit to keep track of the version and help you upgrade.
-cdf_toolkit_version: 0.2.0a5
+cdf_toolkit_version: 0.2.0b1
diff --git a/tests/tests_unit/test_cdf_tk/test_load.py b/tests/tests_unit/test_cdf_tk/test_load.py
index 5265014fd..5b7704139 100644
--- a/tests/tests_unit/test_cdf_tk/test_load.py
+++ b/tests/tests_unit/test_cdf_tk/test_load.py
@@ -1,5 +1,6 @@
 import os
 import pathlib
+from collections import Counter
 from collections.abc import Iterable
 from pathlib import Path
 from unittest.mock import MagicMock, patch
@@ -18,7 +19,7 @@
     Transformation,
     TransformationSchedule,
 )
-from cognite.client.data_classes.data_modeling import Edge, Node
+from cognite.client.data_classes.data_modeling import Edge, Node, NodeApply
 from pytest import MonkeyPatch
 from pytest_regressions.data_regression import DataRegressionFixture
 
@@ -38,12 +39,14 @@
     GroupAllScopedLoader,
     GroupResourceScopedLoader,
     Loader,
+    NodeLoader,
     ResourceLoader,
     ResourceTypes,
     TimeSeriesLoader,
     TransformationLoader,
     ViewLoader,
 )
+from cognite_toolkit._cdf_tk.load.data_classes import NodeAPICall, NodeApplyListWithCall
 from cognite_toolkit._cdf_tk.templates import (
     module_from_path,
     resource_folder_from_path,
@@ -535,6 +538,75 @@ def test_if_ambiguous(
 
         loader.load_resource(Path("transformation.yaml"), cdf_tool_config_real, skip_validation=False)
 
 
+class TestNodeLoader:
+    @pytest.mark.parametrize(
+        "yaml_raw, expected",
+        [
+            pytest.param(
+                """space: my_space
+externalId: my_external_id""",
+                NodeApplyListWithCall([NodeApply("my_space", "my_external_id")]),
+                id="Single node no API call",
+            ),
+            pytest.param(
+                """- space: my_space
+  externalId: my_first_node
+- space: my_space
+  externalId: my_second_node
+""",
+                NodeApplyListWithCall(
+                    [
+                        NodeApply("my_space", "my_first_node"),
+                        NodeApply("my_space", "my_second_node"),
+                    ]
+                ),
+                id="Multiple nodes no API call",
+            ),
+            pytest.param(
+                """autoCreateDirectRelations: true
+skipOnVersionConflict: false
+replace: true
+node:
+  space: my_space
+  externalId: my_external_id""",
+                NodeApplyListWithCall([NodeApply("my_space", "my_external_id")], NodeAPICall(True, False, True)),
+                id="Single node with API call",
+            ),
+            pytest.param(
+                """autoCreateDirectRelations: true
+skipOnVersionConflict: false
+replace: true
+nodes:
+- space: my_space
+  externalId: my_first_node
+- space: my_space
+  externalId: my_second_node
+                """,
+                NodeApplyListWithCall(
+                    [
+                        NodeApply("my_space", "my_first_node"),
"my_first_node"), + NodeApply("my_space", "my_second_node"), + ], + NodeAPICall(True, False, True), + ), + id="Multiple nodes with API call", + ), + ], + ) + def test_load_nodes( + self, + yamL_raw: str, + expected: NodeApplyListWithCall, + cdf_tool_config: CDFToolConfig, + monkeypatch: MonkeyPatch, + ) -> None: + loader = NodeLoader.create_loader(cdf_tool_config, None) + mock_read_yaml_file({"my_node.yaml": yaml.safe_load(yamL_raw)}, monkeypatch) + loaded = loader.load_resource(Path("my_node.yaml"), cdf_tool_config, skip_validation=True) + + assert loaded.dump() == expected.dump() + + class TestExtractionPipelineDependencies: _yaml = """ externalId: 'ep_src_asset_hamburg_sap' @@ -627,7 +699,7 @@ def test_deploy_resource_order(self, cognite_client_approval: ApprovalCogniteCli build_env_name = "dev" system_config = SystemYAML.load_from_directory(PYTEST_PROJECT, build_env_name) config = BuildConfigYAML.load_from_directory(PYTEST_PROJECT, build_env_name) - config.environment.selected_modules_and_packages = ["another_module"] + config.environment.selected = ["another_module"] build_cmd = BuildCommand() build_cmd.build_config( BUILD_DIR, PYTEST_PROJECT, config=config, system_config=system_config, clean=True, verbose=False @@ -748,12 +820,12 @@ def cognite_module_files_with_loader() -> Iterable[ParameterSet]: name="not used", project=os.environ.get("CDF_PROJECT", ""), build_type="dev", - selected_modules_and_packages=[], + selected=[], ) ).load_defaults(source_path) config = config_init.as_build_config() config.set_environment_variables() - config.environment.selected_modules_and_packages = config.available_modules + config.environment.selected = config.available_modules source_by_build_path = BuildCommand().build_config( build_dir=build_dir, @@ -813,3 +885,14 @@ def test_write_cls_spec_against_cognite_modules(self, loader_cls: type[ResourceL warnings = validate_resource_yaml(content, spec, Path("test.yaml")) assert sorted(warnings) == [] + + +class TestLoaders: + def test_unique_display_names(self, cdf_tool_config: CDFToolConfig): + name_by_count = Counter( + [loader_cls.create_loader(cdf_tool_config, None).display_name for loader_cls in LOADER_LIST] + ) + + duplicates = {name: count for name, count in name_by_count.items() if count > 1} + + assert not duplicates, f"Duplicate display names: {duplicates}" diff --git a/tests/tests_unit/test_cdf_tk/test_templates.py b/tests/tests_unit/test_cdf_tk/test_templates.py index fc59cd25b..3111ccad3 100644 --- a/tests/tests_unit/test_cdf_tk/test_templates.py +++ b/tests/tests_unit/test_cdf_tk/test_templates.py @@ -46,7 +46,7 @@ def dummy_environment() -> Environment: name="dev", project="my_project", build_type="dev", - selected_modules_and_packages=["none"], + selected=["none"], ) @@ -319,7 +319,7 @@ def test_build_config_create_valid_build_folder(self, config_yaml: str) -> None: system_config = SystemYAML.load_from_directory(PYTEST_PROJECT, build_env_name) config = BuildConfigYAML.load_from_directory(PYTEST_PROJECT, build_env_name) available_modules = {module.name for module, _ in iterate_modules(PYTEST_PROJECT)} - config.environment.selected_modules_and_packages = list(available_modules) + config.environment.selected = list(available_modules) BuildCommand().build_config( BUILD_DIR, PYTEST_PROJECT, config=config, system_config=system_config, clean=True, verbose=False diff --git a/tests/tests_unit/test_cli/test_behavior.py b/tests/tests_unit/test_cli/test_behavior.py index 6edd800a2..c27f1d63e 100644 --- a/tests/tests_unit/test_cli/test_behavior.py +++ 
b/tests/tests_unit/test_cli/test_behavior.py @@ -65,7 +65,7 @@ def test_duplicated_modules(build_tmp_path: Path, typer_context: typer.Context) config = MagicMock(spec=BuildConfigYAML) config.environment = MagicMock(spec=Environment) config.environment.name = "dev" - config.environment.selected_modules_and_packages = ["module1"] + config.environment.selected = ["module1"] with pytest.raises(ToolkitDuplicatedModuleError) as err: BuildCommand().build_config( build_dir=build_tmp_path, diff --git a/tests/tests_unit/test_cli/test_build_deploy_snapshots/my_example_module.yaml b/tests/tests_unit/test_cli/test_build_deploy_snapshots/my_example_module.yaml index c6affaabf..a6fef4447 100644 --- a/tests/tests_unit/test_cli/test_build_deploy_snapshots/my_example_module.yaml +++ b/tests/tests_unit/test_cli/test_build_deploy_snapshots/my_example_module.yaml @@ -6,7 +6,7 @@ DataSet: name: Example dataset. writeProtected: false DatapointSubscription: -- description: All timeseries with externalId starting with 'ts_value' +- description: All timeseries with externalId starting with ts_value externalId: my_subscription filter: prefix: diff --git a/tests_migrations/constants.py b/tests_migrations/constants.py index f5fb3b825..a8b6e26dc 100644 --- a/tests_migrations/constants.py +++ b/tests_migrations/constants.py @@ -14,6 +14,7 @@ "0.2.0a2", "0.2.0a3", "0.2.0a4", + "0.2.0a5", ]
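
For reference, the central rename in this change set, `selected_modules_and_packages` to `selected`, looks as follows in the `environment` section of a `config.[env].yaml` file (a minimal sketch; the project and module names are hypothetical):

```yaml
environment:
  name: dev
  project: my-project
  type: dev
  # Deprecated spelling, still accepted but triggers a deprecation warning:
  # selected_modules_and_packages:
  #   - cdf_demo_infield
  # New spelling from 0.2.0b1 on:
  selected:
    - cdf_demo_infield
```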