diff --git a/.gitignore b/.gitignore
index bb0c502bf..9b0a24e90 100644
--- a/.gitignore
+++ b/.gitignore
@@ -173,7 +173,7 @@ celerybeat.pid
 *.sage.py

 # Environments
-.env
+*.env
 .venv
 .local/
 env/
diff --git a/CHANGELOG.cdf-tk.md b/CHANGELOG.cdf-tk.md
index 3a0df1b49..571eed87b 100644
--- a/CHANGELOG.cdf-tk.md
+++ b/CHANGELOG.cdf-tk.md
@@ -15,6 +15,13 @@ Changes are grouped as follows:
 - `Fixed` for any bug fixes.
 - `Security` in case of vulnerabilities.

+## [0.2.11] - 2024-07-05
+
+### Fixed
+
+- When running `cdf-tk build`, if you had two non-YAML files with the same name in different modules, or in
+  subdirectories of the same module, the Toolkit would overwrite the first file with the second. This is now fixed.
+
 ## [0.2.10] - 2024-07-03

 ### Fixed
diff --git a/CHANGELOG.templates.md b/CHANGELOG.templates.md
index dfa9d885d..8b6f61751 100644
--- a/CHANGELOG.templates.md
+++ b/CHANGELOG.templates.md
@@ -15,6 +15,10 @@ Changes are grouped as follows:
 - `Fixed` for any bug fixes.
 - `Security` in case of vulnerabilities.

+## [0.2.11] - 2024-07-05
+
+No changes to templates.
+
 ## [0.2.10] - 2024-07-03

 No changes to templates.
diff --git a/cognite_toolkit/_cdf_tk/_migration.yaml b/cognite_toolkit/_cdf_tk/_migration.yaml
index 00412fdeb..197e3d5ff 100644
--- a/cognite_toolkit/_cdf_tk/_migration.yaml
+++ b/cognite_toolkit/_cdf_tk/_migration.yaml
@@ -1,8 +1,13 @@
-- version: 0.2.10
+- version: 0.2.11
   cognite_modules: {}
   resources: {}
   tool: {}
   cognite_modules_hash: ""
+- version: 0.2.10
+  cognite_modules: {}
+  resources: {}
+  tool: {}
+  cognite_modules_hash: "a9440375346d12c63daa0ee0ae7d76b6251503395328f7f04b31e566c64d655a"
 - version: 0.2.9
   cognite_modules: {}
   resources: {}
diff --git a/cognite_toolkit/_cdf_tk/commands/build.py b/cognite_toolkit/_cdf_tk/commands/build.py
index e1ef82f31..b324b2bb2 100644
--- a/cognite_toolkit/_cdf_tk/commands/build.py
+++ b/cognite_toolkit/_cdf_tk/commands/build.py
@@ -8,7 +8,7 @@
 import shutil
 import sys
 import traceback
-from collections import ChainMap, defaultdict
+from collections import ChainMap, Counter, defaultdict
 from collections.abc import Hashable, Mapping, Sequence
 from dataclasses import dataclass, field
 from pathlib import Path
@@ -24,7 +24,7 @@
 from cognite_toolkit._cdf_tk.commands._base import ToolkitCommand
 from cognite_toolkit._cdf_tk.constants import (
     _RUNNING_IN_BROWSER,
-    EXCL_INDEX_SUFFIX,
+    INDEX_PATTERN,
     ROOT_MODULES,
     TEMPLATE_VARS_FILE_SUFFIXES,
 )
@@ -219,9 +219,11 @@ def process_config_files(
         for resource_directory_name, directory_files in files_by_resource_directory.items():
             build_folder: list[Path] = []
             for source_path in directory_files.resource_files:
-                destination = self._replace_variables_validate_to_build_directory(
-                    source_path, resource_directory_name, state, build_dir, verbose
+                destination = state.create_destination_path(
+                    source_path, resource_directory_name, module_dir, build_dir
                 )
+
+                self._replace_variables_validate_to_build_directory(source_path, destination, state, verbose)
                 build_folder.append(destination)

             if resource_directory_name == FunctionLoader.folder_name:
@@ -236,13 +238,16 @@ def process_config_files(
                 self.copy_files_to_upload_to_build_directory(
                     file_to_upload=directory_files.other_files,
                     resource_files_build_folder=build_folder,
+                    state=state,
                     module_dir=module_dir,
                     build_dir=build_dir,
                     verbose=verbose,
                 )
             else:
                 for source_path in directory_files.other_files:
-                    destination = build_dir / resource_directory_name / source_path.name
+                    destination = state.create_destination_path(
+                        source_path, resource_directory_name, module_dir, build_dir
+                    )
                     destination.parent.mkdir(parents=True, exist_ok=True)
                     if (
                         resource_directory_name == DatapointsLoader.folder_name
@@ -290,27 +295,26 @@ def _validate_function_directory(
         )

     def _replace_variables_validate_to_build_directory(
-        self, source_path: Path, resource_directory: str, state: _BuildState, build_dir: Path, verbose: bool
-    ) -> Path:
+        self, source_path: Path, destination_path: Path, state: _BuildState, verbose: bool
+    ) -> None:
         if verbose:
             print(f"    [bold green]INFO:[/] Processing {source_path.name}")
-        destination = build_dir / resource_directory / state.create_file_name(source_path, resource_directory)
-        destination.parent.mkdir(parents=True, exist_ok=True)
+
+        destination_path.parent.mkdir(parents=True, exist_ok=True)

         content = source_path.read_text()
         state.hash_by_source_path[source_path] = calculate_str_or_file_hash(content)

         content = state.replace_variables(content, source_path.suffix)
-        destination.write_text(content)
-        state.source_by_build_path[destination] = source_path
+        destination_path.write_text(content)
+        state.source_by_build_path[destination_path] = source_path

-        file_warnings = self.validate(content, source_path, destination, state, verbose)
+        file_warnings = self.validate(content, source_path, destination_path, state, verbose)
         if file_warnings:
             self.warning_list.extend(file_warnings)
             # Here we do not use the self.warn method as we want to print the warnings as a group.
             if self.print_warning:
                 print(str(file_warnings))
-        return destination

     def _check_missing_dependencies(self, state: _BuildState, project_config_dir: Path) -> None:
         existing = {(resource_cls, id_) for resource_cls, ids in state.ids_by_resource_type.items() for id_ in ids}
@@ -372,11 +376,18 @@ def _is_selected_module(relative_module_dir: Path, selected_modules: list[str |
     def _to_files_by_resource_directory(self, filepaths: list[Path], module_dir: Path) -> dict[str, ResourceDirectory]:
         # Sort to support 1., 2. etc prefixes
-        def sort_key(p: Path) -> int:
-            if result := re.findall(r"^(\d+)", p.stem):
-                return int(result[0])
+        def sort_key(p: Path) -> tuple[int, int, str]:
+            first = {
+                ".yaml": 0,
+                ".yml": 0,
+            }.get(p.suffix.lower(), 1)
+            # We ensure that the YAML files are sorted before other files.
+            # This matters when we add indexes to files: we want, for example, a .sql file
+            # with the same name as a .yaml file to get the same index as the .yaml file.
+            if result := INDEX_PATTERN.search(p.stem):
+                return first, int(result.group()[:-1]), p.name
             else:
-                return len(filepaths)
+                return first, len(filepaths) + 1, p.name

         # The builder of a module can control the order that resources are deployed by prefixing a number
         # The custom key 'sort_key' is to get the sort on integer and not the string.
@@ -524,6 +535,7 @@ def _read_function_path_by_external_id(
     def copy_files_to_upload_to_build_directory(
         file_to_upload: list[Path],
         resource_files_build_folder: list[Path],
+        state: _BuildState,
         module_dir: Path,
         build_dir: Path,
         verbose: bool = False,
@@ -542,8 +554,8 @@ def copy_files_to_upload_to_build_directory(
             destination_stem = filepath.stem
             if template_name:
                 destination_stem = template_name.replace(FileMetadataLoader.template_pattern, filepath.stem)
-            destination = build_dir / FileLoader.folder_name / f"{destination_stem}{filepath.suffix}"
-            destination.parent.mkdir(parents=True, exist_ok=True)
+            new_source = filepath.parent / f"{destination_stem}{filepath.suffix}"
+            destination = state.create_destination_path(new_source, FileLoader.folder_name, module_dir, build_dir)
             shutil.copyfile(filepath, destination)

     @staticmethod
@@ -731,7 +743,8 @@ class _BuildState:
     variables_by_module_path: dict[str, dict[str, str]] = field(default_factory=dict)
     source_by_build_path: dict[Path, Path] = field(default_factory=dict)
     hash_by_source_path: dict[Path, str] = field(default_factory=dict)
-    number_by_resource_type: dict[str, int] = field(default_factory=lambda: defaultdict(int))
+    index_by_resource_type_counter: Counter[str] = field(default_factory=Counter)
+    index_by_filepath_stem: dict[Path, int] = field(default_factory=dict)
     printed_function_warning: bool = False
     ids_by_resource_type: dict[type[ResourceLoader], dict[Hashable, Path]] = field(
         default_factory=lambda: defaultdict(dict)
     )
@@ -749,15 +762,35 @@ def local_variables(self) -> Mapping[str, str]:

     def update_local_variables(self, module_dir: Path) -> None:
         self._local_variables = _Helpers.create_local_config(self.variables_by_module_path, module_dir)

-    def create_file_name(self, filepath: Path, resource_directory: str) -> str:
-        filename = filepath.name
-        if filepath.suffix in EXCL_INDEX_SUFFIX:
-            return filename
+    def create_destination_path(
+        self, source_path: Path, resource_directory: str, module_dir: Path, build_dir: Path
+    ) -> Path:
+        """Creates the filepath in the build directory for the given source path.
+
+        Note that this is a complex operation as the modules in the source are nested while the build directory is flat.
+        This means that we lose information and risk having duplicate filenames. To avoid this, we prefix the filename
+        with a number to ensure uniqueness.
+        """
+        filename = source_path.name
         # Get rid of the local index
-        filename = re.sub("^[0-9]+\\.", "", filename)
-        self.number_by_resource_type[resource_directory] += 1
-        filename = f"{self.number_by_resource_type[resource_directory]}.{filename}"
-        return filename
+        filename = INDEX_PATTERN.sub("", filename)
+
+        relative_stem = module_dir.name / source_path.relative_to(module_dir).parent / source_path.stem
+        if relative_stem in self.index_by_filepath_stem:
+            # Ensure extra files (.sql, .pdf) with the same stem get the same index as the
+            # main YAML file. The Transformation Loader expects this.
+            index = self.index_by_filepath_stem[relative_stem]
+        else:
+            # Increment to ensure we do not get duplicate filenames when we flatten the file
+            # structure from the module to the build directory.
+            self.index_by_resource_type_counter[resource_directory] += 1
+            index = self.index_by_resource_type_counter[resource_directory]
+            self.index_by_filepath_stem[relative_stem] = index
+
+        filename = f"{index}.{filename}"
+        destination_path = build_dir / resource_directory / filename
+        destination_path.parent.mkdir(parents=True, exist_ok=True)
+        return destination_path

     def replace_variables(self, content: str, file_suffix: str = ".yaml") -> str:
         for name, variable in self.local_variables.items():
diff --git a/cognite_toolkit/_cdf_tk/constants.py b/cognite_toolkit/_cdf_tk/constants.py
index 9a9d639f2..c5fe269c5 100644
--- a/cognite_toolkit/_cdf_tk/constants.py
+++ b/cognite_toolkit/_cdf_tk/constants.py
@@ -1,3 +1,4 @@
+import re
 from pathlib import Path

 try:
@@ -27,8 +28,6 @@
 # Add any other files below that should be included in a build
 EXCL_FILES = ["README.md", DEFAULT_CONFIG_FILE]
-# Which suffixes to exclude when we create indexed files (i.e., they are bundled with their main config file)
-EXCL_INDEX_SUFFIX = frozenset([".sql", ".csv", ".parquet"])
 # Files to search for variables.
 SEARCH_VARIABLES_SUFFIX = frozenset([".yaml", "yml", ".sql", ".csv"])
 # Which files to process for template variable replacement
@@ -39,6 +38,9 @@
 COGNITE_MODULES_PATH = ROOT_PATH / COGNITE_MODULES
 SUPPORT_MODULE_UPGRADE_FROM_VERSION = "0.1.0"

+# This is used in the build directory to keep track of ordering and to flatten the
+# module directory structure while accounting for duplicated names.
+INDEX_PATTERN = re.compile("^[0-9]+\\.")


 class URL:
diff --git a/cognite_toolkit/_cdf_tk/loaders/_data_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_data_loaders.py
index 4265f67fb..c1e85ad45 100644
--- a/cognite_toolkit/_cdf_tk/loaders/_data_loaders.py
+++ b/cognite_toolkit/_cdf_tk/loaders/_data_loaders.py
@@ -11,6 +11,7 @@
 from cognite.client.data_classes import FileMetadataWrite, FileMetadataWriteList, capabilities
 from cognite.client.data_classes.capabilities import Capability, FilesAcl, RawAcl, TimeSeriesAcl

+from cognite_toolkit._cdf_tk.constants import INDEX_PATTERN
 from cognite_toolkit._cdf_tk.utils import CDFToolConfig

 from ._base_loaders import DataLoader
@@ -104,11 +105,11 @@ def upload(self, datafile: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> t
         elif isinstance(loaded, FileMetadataWriteList):
             self.meta_data_list.extend(loaded)
             self.has_loaded_metadata = True
-
-        meta_data = next((meta for meta in self.meta_data_list if meta.name == datafile.name), None)
+        source_file_name = INDEX_PATTERN.sub("", datafile.name)
+        meta_data = next((meta for meta in self.meta_data_list if meta.name == source_file_name), None)
         if meta_data is None:
             raise ValueError(
-                f"Missing metadata for file {datafile.name}. Please provide a yaml file with metadata "
+                f"Missing metadata for file {source_file_name}. Please provide a yaml file with metadata "
                 "with an entry with the same name."
             )
         external_id = meta_data.external_id
@@ -137,7 +138,7 @@ def get_required_capability(cls, ToolGlobals: CDFToolConfig) -> Capability:
         return RawAcl([RawAcl.Action.Read, RawAcl.Action.Write], RawAcl.Scope.All())

     def upload(self, datafile: Path, ToolGlobals: CDFToolConfig, dry_run: bool) -> tuple[str, int]:
-        pattern = re.compile(rf"^(\d+\.)?{datafile.stem}\.(yml|yaml)$")
+        pattern = re.compile(rf"{datafile.stem}\.(yml|yaml)$")
         metadata_file = next((filepath for filepath in datafile.parent.glob("*") if pattern.match(filepath.name)), None)
         if metadata_file is not None:
             raw = yaml.safe_load(metadata_file.read_text())
diff --git a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders.py b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders.py
index 897b2e3d1..ea921c8ec 100644
--- a/cognite_toolkit/_cdf_tk/loaders/_resource_loaders.py
+++ b/cognite_toolkit/_cdf_tk/loaders/_resource_loaders.py
@@ -15,7 +15,6 @@
 import itertools
 import json
-import re
 from abc import ABC
 from collections import defaultdict
 from collections.abc import Callable, Hashable, Iterable, Sequence, Sized
@@ -159,6 +158,7 @@
 from rich import print

 from cognite_toolkit._cdf_tk._parameters import ANY_INT, ANY_STR, ANYTHING, ParameterSpec, ParameterSpecSet
+from cognite_toolkit._cdf_tk.constants import INDEX_PATTERN
 from cognite_toolkit._cdf_tk.exceptions import (
     ToolkitFileNotFoundError,
     ToolkitInvalidParameterNameError,
@@ -1797,13 +1797,21 @@ def _are_equal(

         return self._return_are_equal(local_dumped, cdf_dumped, return_dumped)

-    def _get_query_file(self, filepath: Path, transformation_external_id: str | None) -> Path | None:
-        file_name = re.sub(r"\d+\.", "", filepath.stem)
-        query_file = filepath.parent / f"{file_name}.sql"
+    @staticmethod
+    def _get_query_file(filepath: Path, transformation_external_id: str | None) -> Path | None:
+        query_file = filepath.parent / f"{filepath.stem}.sql"
         if not query_file.exists() and transformation_external_id:
-            query_file = filepath.parent / f"{transformation_external_id}.sql"
-            if not query_file.exists():
+            found_query_file = next(
+                (
+                    f
+                    for f in filepath.parent.iterdir()
+                    if f.is_file() and f.name.endswith(f"{transformation_external_id}.sql")
+                ),
+                None,
+            )
+            if found_query_file is None:
                 return None
+            query_file = found_query_file
         return query_file

     def load_resource(
@@ -2501,6 +2509,13 @@ def load_resource(
         self, filepath: Path, ToolGlobals: CDFToolConfig, skip_validation: bool
     ) -> FileMetadataWrite | FileMetadataWriteList:
         loaded = load_yaml_inject_variables(filepath, ToolGlobals.environment_variables())
+
+        file_to_upload_by_source_name: dict[str, Path] = {
+            INDEX_PATTERN.sub("", file.name): file
+            for file in filepath.parent.glob("*")
+            if file.suffix not in {".yaml", ".yml"}
+        }
+
         is_file_template = (
             isinstance(loaded, list) and len(loaded) == 1 and "$FILENAME" in loaded[0].get("externalId", "")
         )
@@ -2511,16 +2526,13 @@ def load_resource(
             if "name" in template and "$FILENAME" in template["name"]:
                 template_prefix, template_suffix = template["name"].split("$FILENAME", maxsplit=1)
             loaded_list: list[dict[str, Any]] = []
-            for file in filepath.parent.glob("*"):
-                if file.suffix in [".yaml", ".yml"]:
-                    continue
+            for source_name, file in file_to_upload_by_source_name.items():
                 # Deep Copy
                 new_file = json.loads(json.dumps(template))
+
                 # We modify the filename in the build command, we clean the name here to get the original filename
-                filename_in_module = (
-                    re.sub("^[0-9]+\\.", "", file.name).removeprefix(template_prefix).removesuffix(template_suffix)
-                )
-                new_file["name"] = file.name
+                filename_in_module = source_name.removeprefix(template_prefix).removesuffix(template_suffix)
+                new_file["name"] = source_name
                 new_file["externalId"] = new_file["externalId"].replace("$FILENAME", filename_in_module)
                 loaded_list.append(new_file)
@@ -2546,8 +2558,8 @@ def load_resource(
         files_metadata: FileMetadataWriteList = FileMetadataWriteList.load(loaded_list)
         for meta in files_metadata:
-            if meta.name and not Path(filepath.parent / meta.name).exists():
-                raise ToolkitFileNotFoundError(f"Could not find file {meta.name} referenced " f"in filepath {filepath}")
+            if meta.name and meta.name not in file_to_upload_by_source_name:
+                raise ToolkitFileNotFoundError(f"Could not find file {meta.name} referenced in filepath {filepath}")
         return files_metadata

     def _are_equal(
diff --git a/cognite_toolkit/_system.yaml b/cognite_toolkit/_system.yaml
index ba910acd1..479904b62 100644
--- a/cognite_toolkit/_system.yaml
+++ b/cognite_toolkit/_system.yaml
@@ -25,4 +25,4 @@ packages:
     - example_pump_data_model

 # This part is used by cdf-toolkit to keep track of the version and help you upgrade.
-cdf_toolkit_version: 0.2.10
\ No newline at end of file
+cdf_toolkit_version: 0.2.11
\ No newline at end of file
diff --git a/cognite_toolkit/_version.py b/cognite_toolkit/_version.py
index 229d8d488..d9498e143 100644
--- a/cognite_toolkit/_version.py
+++ b/cognite_toolkit/_version.py
@@ -1 +1 @@
-__version__ = "0.2.10"
+__version__ = "0.2.11"
diff --git a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/3.LiftStation.view.yaml b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/3.LiftStation.view.yaml
index 9778de0c5..0cedba2ad 100644
--- a/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/3.LiftStation.view.yaml
+++ b/cognite_toolkit/cognite_modules/experimental/example_pump_data_model/data_models/3.LiftStation.view.yaml
@@ -13,6 +13,7 @@ implements:
     version: v1
 properties:
   pumps:
+    connectionType: multi_edge_connection
     type:
       space: '{{model_space}}'
       externalId: LiftStation.pumps
diff --git a/poetry.lock b/poetry.lock
index 5a42e51bd..cec512a0e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -328,13 +328,13 @@ python-json-logger = ">=2.0.7,<3.0.0"

 [[package]]
 name = "cognite-sdk"
-version = "7.52.3"
+version = "7.53.2"
 description = "Cognite Python SDK"
 optional = false
 python-versions = "<4.0,>=3.8"
 files = [
-    {file = "cognite_sdk-7.52.3-py3-none-any.whl", hash = "sha256:eff71cd93e1d5b7ac78ac99b263671246bab3705c2e6ed000d437e77573f19ab"},
-    {file = "cognite_sdk-7.52.3.tar.gz", hash = "sha256:893992d9ee385e0c4224278780209c849a1157bb6bbde62a698eec67d5d58503"},
+    {file = "cognite_sdk-7.53.2-py3-none-any.whl", hash = "sha256:90cf012ff854b0821df873f8cd132884e82fbd760a35aff42c166f72e74cef97"},
+    {file = "cognite_sdk-7.53.2.tar.gz", hash = "sha256:514c4e6048f563438852868b5034fb71c7764ebfcb16b1c2f09675b496815f37"},
 ]

 [package.dependencies]
@@ -1301,6 +1301,7 @@ optional = false
 python-versions = ">=3.9"
 files = [
     {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
+    {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"},
     {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
     {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
     {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
@@ -1321,6 +1322,7 @@ files = [
     {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
     {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
     {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"},
+    {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"},
     {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"},
     {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"},
     {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"},
@@ -1817,6 +1819,7 @@ files = [
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
     {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+    {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
     {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
     {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
     {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -1824,8 +1827,16 @@ files = [
     {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
     {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
     {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+    {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
     {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
     {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
+    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+    {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
     {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
     {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
     {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -1842,6 +1853,7 @@ files = [
     {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
     {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
     {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+    {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
     {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
     {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
     {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -1849,6 +1861,7 @@ files = [
     {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
     {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
     {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+    {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
     {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
     {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
     {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
diff --git a/pyproject.toml b/pyproject.toml
index 90c9a99c8..2f0cc067b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "cognite_toolkit"
-version = "0.2.10"
+version = "0.2.11"
 description = "Official Cognite Data Fusion tool for project templates and configuration deployment"
 authors = ["Cognite AS "]
 license = "Apache-2"
@@ -46,6 +46,7 @@ fastparquet = "^2024.5.0"

 [tool.poetry.scripts]
 cdf-tk = "cognite_toolkit._cdf:app"
+cdf = "cognite_toolkit._cdf:app"

 [tool.coverage.report]
 show_missing = true
diff --git a/tests/data/project_for_test/_system.yaml b/tests/data/project_for_test/_system.yaml
index 04794fc54..60d88b9e8 100644
--- a/tests/data/project_for_test/_system.yaml
+++ b/tests/data/project_for_test/_system.yaml
@@ -4,4 +4,4 @@ packages:
     - child_module

 # This part is used by cdf-toolkit to keep track of the version and help you upgrade.
-cdf_toolkit_version: 0.2.10
+cdf_toolkit_version: 0.2.11
diff --git a/tests/data/project_no_cognite_modules/_system.yaml b/tests/data/project_no_cognite_modules/_system.yaml
index 18304cad7..606ac8c16 100644
--- a/tests/data/project_no_cognite_modules/_system.yaml
+++ b/tests/data/project_no_cognite_modules/_system.yaml
@@ -3,4 +3,4 @@
 packages: {}

 # This part is used by cdf-toolkit to keep track of the version and help you upgrade.
-cdf_toolkit_version: 0.2.10
+cdf_toolkit_version: 0.2.11
diff --git a/tests/data/project_with_bad_modules/_system.yaml b/tests/data/project_with_bad_modules/_system.yaml
index 18304cad7..606ac8c16 100644
--- a/tests/data/project_with_bad_modules/_system.yaml
+++ b/tests/data/project_with_bad_modules/_system.yaml
@@ -3,4 +3,4 @@
 packages: {}

 # This part is used by cdf-toolkit to keep track of the version and help you upgrade.
-cdf_toolkit_version: 0.2.10
+cdf_toolkit_version: 0.2.11
diff --git a/tests/data/run_data/_system.yaml b/tests/data/run_data/_system.yaml
index 6b60c3a3a..2a84da058 100644
--- a/tests/data/run_data/_system.yaml
+++ b/tests/data/run_data/_system.yaml
@@ -25,4 +25,4 @@ packages:
     - example_pump_data_model

 # This part is used by cdf-toolkit to keep track of the version and help you upgrade.
-cdf_toolkit_version: 0.2.10
+cdf_toolkit_version: 0.2.11
diff --git a/tests/tests_integration/constants.py b/tests/tests_integration/constants.py
index 407a15761..c35ad7639 100644
--- a/tests/tests_integration/constants.py
+++ b/tests/tests_integration/constants.py
@@ -5,4 +5,4 @@
 # This is needed as we run tests for two different versions of Python in parallel.
 # The platform.system() is not used, but is here in case we start testing on Windows as well.
 # The random number is to avoid conflicts when running tests in parallel (for example, two PRs).
-RUN_UNIQUE_ID = f"{platform.system()}_{sys.version_info.major}_{sys.version_info.minor}_{random.randint(0, 10_000)!s}"
+RUN_UNIQUE_ID = f"{platform.system()}_{sys.version_info.major}_{sys.version_info.minor}_{random.randint(0, 10)!s}"
diff --git a/tests/tests_unit/approval_client/client.py b/tests/tests_unit/approval_client/client.py
index d76854b6e..d78df136d 100644
--- a/tests/tests_unit/approval_client/client.py
+++ b/tests/tests_unit/approval_client/client.py
@@ -54,6 +54,8 @@
 from cognite.client.utils._text import to_camel_case
 from requests import Response

+from cognite_toolkit._cdf_tk.constants import INDEX_PATTERN
+
 from .config import API_RESOURCES
 from .data_classes import APIResource, AuthGroupCalls

@@ -394,11 +396,16 @@ def upload(*args, **kwargs) -> None:
             name = ""
             for k, v in kwargs.items():
                 if isinstance(v, Path) or (isinstance(v, str) and Path(v).exists()):
+                    # The index pattern is used to ensure unique names. The index is
+                    # removed here, as we do not care about the order in which the files are uploaded.
+                    filepath = Path(v)
+                    filepath = filepath.with_name(INDEX_PATTERN.sub("", filepath.name))
+
                     try:
-                        kwargs[k] = "/".join(Path(v).relative_to(TEST_FOLDER).parts)
+                        kwargs[k] = "/".join(filepath.relative_to(TEST_FOLDER).parts)
                     except ValueError:
-                        kwargs[k] = "/".join(Path(v).parts)
-                    name = Path(v).name
+                        kwargs[k] = "/".join(filepath.parts)
+                    name = filepath.name

             created_resources[resource_cls.__name__].append(
                 {
diff --git a/tests/tests_unit/test_cdf_tk/test_commands/test_build.py b/tests/tests_unit/test_cdf_tk/test_commands/test_build.py
index 1792ecdd6..fdc49022e 100644
--- a/tests/tests_unit/test_cdf_tk/test_commands/test_build.py
+++ b/tests/tests_unit/test_cdf_tk/test_commands/test_build.py
@@ -87,6 +87,8 @@ def test_custom_project_no_warnings(self, tmp_path: Path, monkeypatch: MonkeyPat
             if f.is_file() and TransformationLoader.is_supported_file(f)
         ]
         assert len(transformation_files) == 2
+        sql_files = [f for f in (tmp_path / "transformations").iterdir() if f.is_file() and f.suffix == ".sql"]
+        assert len(sql_files) == 2


 def valid_yaml_semantics_test_cases() -> Iterable[pytest.ParameterSet]:
diff --git a/tests/tests_unit/test_cdf_tk/test_loaders/test_file_metadata_loader.py b/tests/tests_unit/test_cdf_tk/test_loaders/test_file_metadata_loader.py
index 75e241483..c09ad8e40 100644
--- a/tests/tests_unit/test_cdf_tk/test_loaders/test_file_metadata_loader.py
+++ b/tests/tests_unit/test_cdf_tk/test_loaders/test_file_metadata_loader.py
@@ -42,18 +42,18 @@ def file_metadata_config_cases() -> Iterable[ParameterSet]:
     yield pytest.param(
         """externalId: sharepointABC
-name: A file
+name: A file.txt
 dataSetExternalId: ds_files
 source: sharepointABC
 """,
-        [],
+        ["1.A file.txt"],
         data_set_mapping,
         FileMetadataWriteList(
             [
                 FileMetadataWrite(
                     external_id="sharepointABC",
                     source="sharepointABC",
-                    name="A file",
+                    name="A file.txt",
                     data_set_id=42,
                 )
             ]
         ),
     )
     yield pytest.param(
         """- externalId: sharepointABC
-  name: A file
+  name: A file.txt
   dataSetExternalId: ds_files
   source: sharepointABC
 - externalId: sharepointABC2
-  name: Another file
+  name: Another file.txt
   dataSetExternalId: ds_files
   source: sharepointABC
 """,
-        [],
+        ["1.A file.txt", "1.Another file.txt"],
         data_set_mapping,
         FileMetadataWriteList(
             [
                 FileMetadataWrite(
                     external_id="sharepointABC",
                     source="sharepointABC",
-                    name="A file",
+                    name="A file.txt",
                     data_set_id=42,
                 ),
                 FileMetadataWrite(
                     external_id="sharepointABC2",
                     source="sharepointABC",
name="Another file", + name="Another file.txt", data_set_id=42, ), ] @@ -109,7 +109,6 @@ def test_load_resources( filepath.parent.glob.return_value = [Path(f) for f in files] cdf_tool = CDFToolConfig(skip_initialization=True) cdf_tool._cache.data_set_id_by_external_id = data_set_mapping - monkeypatch.setattr("cognite_toolkit._cdf_tk.loaders._resource_loaders.Path", self.always_existing_path) resources = loader.load_resource(filepath, cdf_tool, skip_validation=False)