Release 0.2.0 (#633)
# Description

Please describe the change you have made.

## Checklist

- [ ] Tests added/updated.
- [ ] Run Demo Job Locally.
- [ ] Documentation updated.
- [ ] Changelogs updated in
  [CHANGELOG.cdf-tk.md](https://github.com/cognitedata/toolkit/blob/main/CHANGELOG.cdf-tk.md).
- [ ] Template changelogs updated in
  [CHANGELOG.templates.md](https://github.com/cognitedata/toolkit/blob/main/CHANGELOG.templates.md).
- [ ] Version bumped.
  [_version.py](https://github.com/cognitedata/toolkit/blob/main/cognite/cognite_toolkit/_version.py) and
  [pyproject.toml](https://github.com/cognitedata/toolkit/blob/main/pyproject.toml)
  per [semantic versioning](https://semver.org/).
doctrino authored Jun 10, 2024
2 parents f0ad9cf + 641307a commit 038f435
Showing 29 changed files with 340 additions and 163 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -15,7 +15,7 @@ repos:
- id: ruff-format
args:
- --line-length=120
rev: v0.4.7
rev: v0.4.8

- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
9 changes: 9 additions & 0 deletions CHANGELOG.cdf-tk.md
@@ -15,6 +15,15 @@ Changes are grouped as follows:
- `Fixed` for any bug fixes.
- `Security` in case of vulnerabilities.

## [0.2.0] - 2024-06-10

### Fixed

- When running `cdf-tk clean` or `cdf-tk deploy --drop --drop-data`, an edge case could trigger the error
`ValueError: No capabilities given`. This is now fixed.
- When deploying `containers` resources with an index, `cdf-tk deploy` would consider the resource changed
even though it was not. This is now fixed.

## [0.2.0b4] - 2024-06-06

### Added
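The first `Fixed` entry above describes a guard against verifying an empty capability list. A rough, self-contained sketch of that kind of guard; both function names are stand-ins, not the toolkit's or the Cognite SDK's actual API:

```python
# Self-contained sketch of the empty-capabilities guard; names are illustrative.
def verify_authorization(capabilities: list[str]) -> None:
    if not capabilities:
        raise ValueError("No capabilities given")
    print(f"Verified {len(capabilities)} capabilities")


def verify_if_needed(capabilities: list[str]) -> None:
    # The clean/deploy edge case produced an empty list; treating it as a
    # no-op avoids propagating "ValueError: No capabilities given".
    if not capabilities:
        return
    verify_authorization(capabilities)


verify_if_needed([])                  # silently skipped
verify_if_needed(["groupsAcl:READ"])  # Verified 1 capabilities
```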
4 changes: 4 additions & 0 deletions CHANGELOG.templates.md
@@ -15,6 +15,10 @@ Changes are grouped as follows:
- `Fixed` for any bug fixes.
- `Security` in case of vulnerabilities.

## [0.2.0] - 2024-06-10

No changes to templates.

## [0.2.0b4] - 2024-06-06

### Added
3 changes: 2 additions & 1 deletion cognite_toolkit/_cdf.py
@@ -249,7 +249,8 @@ def build(
] = False,
) -> None:
"""Build configuration files from the module templates to a local build directory."""
cmd = BuildCommand()
user_command = f"cdf-tk {' '.join(sys.argv[1:])}"
cmd = BuildCommand(user_command=user_command)
cmd.execute(ctx.obj.verbose, Path(source_dir), Path(build_dir), build_env_name, no_clean)


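For reference, the `user_command` string built above simply reconstructs the CLI invocation from `sys.argv`. A worked example with a made-up argument list:

```python
# Worked example of the user_command string built in _cdf.py above;
# the argument list is invented for illustration.
argv = ["cdf-tk", "build", "--build-dir", "build", "--env", "dev"]
user_command = f"cdf-tk {' '.join(argv[1:])}"
print(user_command)  # cdf-tk build --build-dir build --env dev
```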
9 changes: 7 additions & 2 deletions cognite_toolkit/_cdf_tk/_migration.yaml
@@ -1,13 +1,18 @@
- version: 0.2.0b4
- version: 0.2.0
cognite_modules: {}
resources: {}
tool: {}
cognite_modules_hash: ""
- version: 0.2.0b4
cognite_modules: {}
resources: {}
tool: {}
cognite_modules_hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
- version: 0.2.0b3
cognite_modules: {}
resources: {}
tool:
- title: "Function config YAML files must be in the function directori"
- title: "Function config YAML files must be in the function directory"
steps:
- Before functions config YAML files were allowed in subdirectories of the function directory. For example,
`functions/my_function/my_function.yaml` was allowed. This is no longer supported. The function config YAML
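The diff only records the stored `cognite_modules_hash` value; how the toolkit computes it is not shown in this commit. One plausible way to hash a module tree with SHA-256, purely as an illustration:

```python
# Illustrative only: hashing a directory tree with SHA-256. The toolkit's
# actual hashing scheme is not part of this diff and may differ.
import hashlib
from pathlib import Path


def hash_module_tree(root: Path) -> str:
    digest = hashlib.sha256()
    for path in sorted(root.rglob("*")):
        if path.is_file():
            digest.update(path.relative_to(root).as_posix().encode())
            digest.update(path.read_bytes())
    return digest.hexdigest()
```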
30 changes: 15 additions & 15 deletions cognite_toolkit/_cdf_tk/commands/_base.py
@@ -1,15 +1,15 @@
from __future__ import annotations

import traceback
from pathlib import Path

from cognite.client.data_classes._base import T_CogniteResourceList
from cognite.client.data_classes._base import T_CogniteResourceList, T_WritableCogniteResource, T_WriteClass
from rich import print
from rich.panel import Panel

from cognite_toolkit._cdf_tk.exceptions import ToolkitRequiredValueError, ToolkitYAMLFormatError
from cognite_toolkit._cdf_tk.loaders import (
ResourceLoader,
)
from cognite_toolkit._cdf_tk.loaders._base_loaders import T_ID, T_WritableCogniteResourceList
from cognite_toolkit._cdf_tk.tk_warnings import (
LowSeverityWarning,
ToolkitWarning,
@@ -21,8 +21,9 @@


class ToolkitCommand:
def __init__(self, print_warning: bool = True):
def __init__(self, print_warning: bool = True, user_command: str | None = None):
self.print_warning = print_warning
self.user_command = user_command
self.warning_list = WarningList[ToolkitWarning]()

def warn(self, warning: ToolkitWarning) -> None:
@@ -32,28 +33,27 @@ def warn(self, warning: ToolkitWarning) -> None:

def _load_files(
self,
loader: ResourceLoader,
loader: ResourceLoader[
T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList
],
filepaths: list[Path],
ToolGlobals: CDFToolConfig,
skip_validation: bool,
verbose: bool = False,
) -> T_CogniteResourceList | None:
) -> T_CogniteResourceList:
loaded_resources = loader.create_empty_of(loader.list_write_cls([]))
for filepath in filepaths:
try:
resource = loader.load_resource(filepath, ToolGlobals, skip_validation)
except KeyError as e:
# KeyError means that we are missing a required field in the yaml file.
print(
f"[bold red]ERROR:[/] Failed to load {filepath.name} with {loader.display_name}. Missing required field: {e}."
f"[bold red]ERROR:[/] Please compare with the API specification at {loader.doc_url()}."
raise ToolkitRequiredValueError(
f"Failed to load {filepath.name} with {loader.display_name}. Missing required field: {e}."
f"\nPlease compare with the API specification at {loader.doc_url()}."
)
return None
except Exception as e:
print(f"[bold red]ERROR:[/] Failed to load {filepath.name} with {loader.display_name}. Error: {e!r}.")
if verbose:
print(Panel(traceback.format_exc()))
return None
raise ToolkitYAMLFormatError(
f"Failed to load {filepath.name} with {loader.display_name}. Error: {e!r}."
)
if resource is None:
# This is intentional. It is, for example, used by the AuthLoader to skip groups with resource scopes.
continue
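The change above replaces print-and-return-`None` error handling in `_load_files` with typed exceptions (`ToolkitRequiredValueError`, `ToolkitYAMLFormatError`). A minimal, self-contained sketch of the same pattern with stand-in names, not toolkit code:

```python
# Minimal sketch of the raise-instead-of-return-None pattern used in
# _load_files; the class and function names are stand-ins.
class RequiredValueError(ValueError):
    """Raised when a YAML file is missing a required field."""


def load_resource(data: dict) -> dict:
    try:
        return {"external_id": data["externalId"]}
    except KeyError as err:
        raise RequiredValueError(f"Missing required field: {err}") from err


try:
    load_resource({"name": "my-resource"})
except RequiredValueError as err:
    print(f"ERROR: {err}")  # ERROR: Missing required field: 'externalId'
```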
9 changes: 6 additions & 3 deletions cognite_toolkit/_cdf_tk/commands/_utils.py
@@ -1,12 +1,12 @@
from __future__ import annotations

from cognite.client.data_classes._base import T_CogniteResourceList
from cognite.client.data_classes._base import T_CogniteResourceList, T_WritableCogniteResource, T_WriteClass
from cognite.client.utils.useful_types import SequenceNotStr

from cognite_toolkit._cdf_tk.loaders import (
ResourceLoader,
)
from cognite_toolkit._cdf_tk.loaders._base_loaders import T_ID
from cognite_toolkit._cdf_tk.loaders._base_loaders import T_ID, T_WritableCogniteResourceList


def _print_ids_or_length(resource_ids: SequenceNotStr[T_ID], limit: int = 10) -> str:
@@ -19,7 +19,10 @@ def _print_ids_or_length(resource_ids: SequenceNotStr[T_ID], limit: int = 10) ->


def _remove_duplicates(
loaded_resources: T_CogniteResourceList, loader: ResourceLoader
loaded_resources: T_CogniteResourceList,
loader: ResourceLoader[
T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList
],
) -> tuple[T_CogniteResourceList, list[T_ID]]:
seen: set[T_ID] = set()
output = loader.create_empty_of(loaded_resources)
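The `_remove_duplicates` signature above only becomes more precisely typed; the underlying pattern is a seen-set keyed by resource ID. A generic, self-contained version for illustration, without the loader machinery:

```python
# Generic version of the duplicate-removal pattern in _remove_duplicates;
# all names here are illustrative.
from typing import Callable, Hashable, Iterable, TypeVar

T = TypeVar("T")


def remove_duplicates(
    items: Iterable[T], get_id: Callable[[T], Hashable]
) -> tuple[list[T], list[Hashable]]:
    seen: set[Hashable] = set()
    unique: list[T] = []
    duplicates: list[Hashable] = []
    for item in items:
        item_id = get_id(item)
        if item_id in seen:
            duplicates.append(item_id)
        else:
            seen.add(item_id)
            unique.append(item)
    return unique, duplicates


resources = [{"externalId": "a"}, {"externalId": "b"}, {"externalId": "a"}]
unique, dupes = remove_duplicates(resources, lambda r: r["externalId"])
print(len(unique), dupes)  # 2 ['a']
```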
45 changes: 35 additions & 10 deletions cognite_toolkit/_cdf_tk/commands/build.py
@@ -32,8 +32,10 @@
SystemYAML,
)
from cognite_toolkit._cdf_tk.exceptions import (
AmbiguousResourceFileError,
ToolkitDuplicatedModuleError,
ToolkitFileExistsError,
ToolkitMissingModulesError,
ToolkitNotADirectoryError,
ToolkitValidationError,
ToolkitYAMLFormatError,
@@ -43,7 +45,9 @@
DatapointsLoader,
FileLoader,
FunctionLoader,
GroupLoader,
Loader,
RawDatabaseLoader,
ResourceLoader,
)
from cognite_toolkit._cdf_tk.tk_warnings import (
@@ -76,14 +80,25 @@ def execute(self, verbose: bool, source_path: Path, build_dir: Path, build_env_n
if not source_path.is_dir():
raise ToolkitNotADirectoryError(str(source_path))

system_config = SystemYAML.load_from_directory(source_path, build_env_name, self.warn)
system_config = SystemYAML.load_from_directory(source_path, build_env_name, self.warn, self.user_command)
config = BuildConfigYAML.load_from_directory(source_path, build_env_name, self.warn)
sources = [module_dir for root_module in ROOT_MODULES if (module_dir := source_path / root_module).exists()]
if not sources:
directories = "\n".join(f" ┣ {name}" for name in ROOT_MODULES[:-1])
raise ToolkitMissingModulesError(
f"Could not find the source modules directory.\nExpected to find one of the following directories\n"
f"{source_path.name}\n{directories}\n{ROOT_MODULES[-1]}"
)
directory_name = "current directory" if source_path == Path(".") else f"project '{source_path!s}'"
module_locations = "\n".join(f" - Module directory '{source!s}'" for source in sources)
print(
Panel(
f"[bold]Building config files from templates into {build_dir!s} for environment {build_env_name} using {source_path!s} as sources...[/bold]"
f"\n[bold]Config file:[/] '{config.filepath.absolute()!s}'"
f"Building {directory_name}:\n - Environment {build_env_name!r}\n - Config '{config.filepath!s}'"
f"\n{module_locations}",
expand=False,
)
)

config.set_environment_variables()

self.build_config(
@@ -542,19 +557,29 @@ def _get_api_spec(self, loader: type[ResourceLoader], destination: Path) -> Para

def _get_loader(self, resource_folder: str, destination: Path) -> type[Loader] | None:
loaders = LOADER_BY_FOLDER_NAME.get(resource_folder, [])
loader: type[Loader] | None
if len(loaders) == 1:
return loaders[0]
else:
loader = next((loader for loader in loaders if loader.is_supported_file(destination)), None)
if loader is None:
loaders = [loader for loader in loaders if loader.is_supported_file(destination)]
if len(loaders) == 0:
self.warn(
ToolkitNotSupportedWarning(
f"the resource {resource_folder!r}",
details=f"Available resources are: {', '.join(LOADER_BY_FOLDER_NAME.keys())}",
)
)
return loader
elif len(loaders) > 1 and all(loader.folder_name == "raw" for loader in loaders):
# Multiple raw loaders load from the same file.
return RawDatabaseLoader
elif len(loaders) > 1 and all(issubclass(loader, GroupLoader) for loader in loaders):
# There are two group loaders, one for resource scoped and one for all scoped.
return GroupLoader
elif len(loaders) > 1:
names = " or ".join(f"{destination.stem}.{loader.kind}{destination.suffix}" for loader in loaders)
raise AmbiguousResourceFileError(
f"Ambiguous resource file {destination.name} in {destination.parent.name} folder. "
f"Unclear whether it is {' or '.join(loader.kind for loader in loaders)}."
f"\nPlease name the file {names}."
)

return loaders[0]

@staticmethod
def iterate_functions(module_dir: Path) -> Iterator[list[Path]]:
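When several loaders claim the same file and neither the raw nor the group special case in `_get_loader` applies, the new `AmbiguousResourceFileError` asks the user to encode the loader kind in the file name. A small illustration of the suggested naming; the competing kinds below are made up:

```python
# Illustration of the file-name hint in AmbiguousResourceFileError; the
# competing loader kinds are hypothetical examples.
from pathlib import Path

destination = Path("my_resource.yaml")
kinds = ["Group", "SecurityCategory"]
names = " or ".join(f"{destination.stem}.{kind}{destination.suffix}" for kind in kinds)
print(f"Please name the file {names}")
# Please name the file my_resource.Group.yaml or my_resource.SecurityCategory.yaml
```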
26 changes: 9 additions & 17 deletions cognite_toolkit/_cdf_tk/commands/clean.py
@@ -5,7 +5,7 @@
from pathlib import Path

import typer
from cognite.client.data_classes._base import T_CogniteResourceList
from cognite.client.data_classes._base import T_CogniteResourceList, T_WritableCogniteResource, T_WriteClass
from cognite.client.exceptions import CogniteAPIError, CogniteNotFoundError
from cognite.client.utils.useful_types import SequenceNotStr
from rich import print
@@ -27,7 +27,7 @@
ResourceContainerLoader,
ResourceLoader,
)
from cognite_toolkit._cdf_tk.loaders._base_loaders import T_ID, Loader
from cognite_toolkit._cdf_tk.loaders._base_loaders import T_ID, Loader, T_WritableCogniteResourceList
from cognite_toolkit._cdf_tk.loaders.data_classes import ResourceContainerDeployResult, ResourceDeployResult
from cognite_toolkit._cdf_tk.tk_warnings import (
LowSeverityWarning,
@@ -46,13 +46,15 @@
class CleanCommand(ToolkitCommand):
def clean_resources(
self,
loader: ResourceLoader,
loader: ResourceLoader[
T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList
],
ToolGlobals: CDFToolConfig,
dry_run: bool = False,
drop: bool = True,
drop_data: bool = False,
verbose: bool = False,
) -> ResourceDeployResult | None:
) -> ResourceDeployResult:
if not isinstance(loader, ResourceContainerLoader) and not drop:
# Skipping silently; in this case, we will not drop data or delete this resource
return ResourceDeployResult(name=loader.display_name)
@@ -70,14 +72,10 @@ def clean_resources(
filepaths = loader.find_files()

# Since we do a clean, we do not want to verify that everything exists wrt data sets, spaces etc.
loaded_resources = self._load_files(loader, filepaths, ToolGlobals, skip_validation=True, verbose=verbose)
if loaded_resources is None:
ToolGlobals.failed = True
return None
loaded_resources = self._load_files(loader, filepaths, ToolGlobals, skip_validation=True)

# Duplicates should be handled on the build step,
# but in case any of them slip through, we do it here as well to
# avoid an error.
# Duplicates are warned in the build step, but the user might continue, so we
# need to check for duplicates here as well.
loaded_resources, duplicates = _remove_duplicates(loaded_resources, loader)

capabilities = loader.get_required_capability(loaded_resources)
@@ -275,12 +273,6 @@ def execute(
)
if result:
results[result.name] = result
if ToolGlobals.failed:
if results and results.has_counts:
print(results.counts_table())
if results and results.has_uploads:
print(results.uploads_table())
raise ToolkitCleanResourceError(f"Failure to clean {loader.display_name} as expected.")
if results.has_counts:
print(results.counts_table())
if results.has_uploads:
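Finally, `clean_resources` now always returns a `ResourceDeployResult` instead of `ResourceDeployResult | None`, so callers no longer need a `None` check before aggregating results. A toy sketch of that caller-side simplification, using stand-in classes rather than toolkit code:

```python
# Toy sketch of the return-type change in clean_resources: a result object is
# always returned, never None. All names here are stand-ins for illustration.
from dataclasses import dataclass


@dataclass
class DeployResult:
    name: str
    deleted: int = 0


def clean_resources(name: str, drop: bool) -> DeployResult:
    if not drop:
        # Previously such paths could effectively yield None; now they report
        # "nothing done" instead.
        return DeployResult(name=name)
    return DeployResult(name=name, deleted=3)


results = {r.name: r for r in (clean_resources("groups", drop=False), clean_resources("spaces", drop=True))}
print(results["groups"].deleted, results["spaces"].deleted)  # 0 3
```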
