From a5dac4e4ffeb45801351cac877d59cd43cc52b91 Mon Sep 17 00:00:00 2001 From: Piotr Czarnas Date: Sun, 27 Oct 2024 21:50:06 +0100 Subject: [PATCH] Documentation and the Python client updated after the Teradata connector was added. --- .../python/dqops/client/models/__init__.py | 4 + .../dqops/client/models/connection_model.py | 18 + .../dqops/client/models/connection_spec.py | 18 + .../dqops/client/models/display_hint.py | 1 + .../client/models/dqo_user_profile_model.py | 9 + .../dqops/client/models/provider_type.py | 1 + .../client/models/teradata_parameters_spec.py | 121 + .../teradata_parameters_spec_properties.py | 45 + .../expected-numbers-in-use-count.md | 512 + .../expected-text-values-in-use-count.md | 652 +- .../expected-texts-in-top-values-count.md | 1476 ++- .../number-found-in-set-percent.md | 566 +- .../text-found-in-set-percent.md | 640 +- .../text-valid-country-code-percent.md | 422 + .../text-valid-currency-code-percent.md | 422 + .../accuracy/total-average-match-percent.md | 105 + .../accuracy/total-max-match-percent.md | 105 + .../accuracy/total-min-match-percent.md | 105 + .../total-not-null-count-match-percent.md | 105 + .../accuracy/total-sum-match-percent.md | 105 + docs/checks/column/anomaly/max-anomaly.md | 144 + docs/checks/column/anomaly/mean-anomaly.md | 144 + .../column/anomaly/mean-change-1-day.md | 144 + .../column/anomaly/mean-change-30-days.md | 144 + .../column/anomaly/mean-change-7-days.md | 144 + docs/checks/column/anomaly/mean-change.md | 144 + docs/checks/column/anomaly/median-anomaly.md | 162 + .../column/anomaly/median-change-1-day.md | 162 + .../column/anomaly/median-change-30-days.md | 162 + .../column/anomaly/median-change-7-days.md | 162 + docs/checks/column/anomaly/median-change.md | 162 + docs/checks/column/anomaly/min-anomaly.md | 144 + docs/checks/column/anomaly/sum-anomaly.md | 144 + .../checks/column/anomaly/sum-change-1-day.md | 144 + .../column/anomaly/sum-change-30-days.md | 144 + .../column/anomaly/sum-change-7-days.md | 144 + docs/checks/column/anomaly/sum-change.md | 144 + docs/checks/column/bool/false-percent.md | 422 + docs/checks/column/bool/true-percent.md | 422 + docs/checks/column/comparisons/max-match.md | 118 + docs/checks/column/comparisons/mean-match.md | 118 + docs/checks/column/comparisons/min-match.md | 118 + .../comparisons/not-null-count-match.md | 128 + .../column/comparisons/null-count-match.md | 168 + docs/checks/column/comparisons/sum-match.md | 118 + .../text-parsable-to-boolean-percent.md | 422 + .../text-parsable-to-date-percent.md | 632 ++ .../text-parsable-to-float-percent.md | 383 + .../text-parsable-to-integer-percent.md | 342 + .../import-custom-result-on-column.md | 66 + .../sql-aggregate-expression-on-column.md | 252 + .../sql-condition-failed-on-column.md | 392 + .../sql-condition-passed-percent-on-column.md | 452 + .../detected-datatype-in-text-changed.md | 2496 ++++- .../datatype/detected-datatype-in-text.md | 2496 ++++- .../column/datetime/date-in-range-percent.md | 412 + .../datetime/date-values-in-future-percent.md | 532 + .../text-match-date-format-percent.md | 452 + .../integrity/lookup-key-found-percent.md | 412 + .../column/integrity/lookup-key-not-found.md | 412 + .../checks/column/nulls/empty-column-found.md | 262 + docs/checks/column/nulls/not-nulls-count.md | 262 + docs/checks/column/nulls/not-nulls-percent.md | 302 + docs/checks/column/nulls/nulls-count.md | 342 + .../column/nulls/nulls-percent-anomaly.md | 240 + .../nulls/nulls-percent-change-1-day.md | 240 +
.../nulls/nulls-percent-change-30-days.md | 240 + .../nulls/nulls-percent-change-7-days.md | 240 + .../column/nulls/nulls-percent-change.md | 240 + docs/checks/column/nulls/nulls-percent.md | 402 + .../numeric/integer-in-range-percent.md | 402 + .../checks/column/numeric/invalid-latitude.md | 342 + .../column/numeric/invalid-longitude.md | 342 + docs/checks/column/numeric/max-in-range.md | 242 + docs/checks/column/numeric/mean-in-range.md | 242 + docs/checks/column/numeric/median-in-range.md | 272 + docs/checks/column/numeric/min-in-range.md | 242 + .../column/numeric/negative-values-percent.md | 402 + docs/checks/column/numeric/negative-values.md | 342 + .../numeric/non-negative-values-percent.md | 402 + .../column/numeric/non-negative-values.md | 342 + .../numeric/number-above-max-value-percent.md | 402 + .../column/numeric/number-above-max-value.md | 342 + .../numeric/number-below-min-value-percent.md | 402 + .../column/numeric/number-below-min-value.md | 342 + .../column/numeric/number-in-range-percent.md | 402 + .../column/numeric/percentile-10-in-range.md | 272 + .../column/numeric/percentile-25-in-range.md | 272 + .../column/numeric/percentile-75-in-range.md | 272 + .../column/numeric/percentile-90-in-range.md | 272 + .../column/numeric/percentile-in-range.md | 272 + .../numeric/population-stddev-in-range.md | 242 + .../numeric/population-variance-in-range.md | 242 + .../column/numeric/sample-stddev-in-range.md | 242 + .../numeric/sample-variance-in-range.md | 242 + docs/checks/column/numeric/sum-in-range.md | 242 + .../column/numeric/valid-latitude-percent.md | 402 + .../column/numeric/valid-longitude-percent.md | 402 + .../patterns/invalid-email-format-found.md | 382 + .../patterns/invalid-email-format-percent.md | 462 + .../invalid-ip4-address-format-found.md | 382 + .../invalid-ip6-address-format-found.md | 382 + .../invalid-usa-phone-format-found.md | 422 + .../invalid-usa-phone-format-percent.md | 482 + .../invalid-usa-zipcode-format-found.md | 422 + .../invalid-usa-zipcode-format-percent.md | 482 + .../patterns/invalid-uuid-format-found.md | 382 + .../patterns/invalid-uuid-format-percent.md | 442 + .../text-not-matching-date-pattern-found.md | 452 + .../text-not-matching-date-pattern-percent.md | 452 + .../text-not-matching-name-pattern-percent.md | 442 + .../patterns/text-not-matching-regex-found.md | 452 + .../texts-not-matching-regex-percent.md | 452 + .../column/pii/contains-email-percent.md | 442 + .../checks/column/pii/contains-ip4-percent.md | 442 + .../checks/column/pii/contains-ip6-percent.md | 502 + .../column/pii/contains-usa-phone-percent.md | 462 + .../pii/contains-usa-zipcode-percent.md | 462 + docs/checks/column/text/max-word-count.md | 282 + docs/checks/column/text/min-word-count.md | 282 + .../text-length-above-max-length-percent.md | 432 + .../text/text-length-above-max-length.md | 362 + .../text-length-below-min-length-percent.md | 422 + .../text/text-length-below-min-length.md | 362 + .../text/text-length-in-range-percent.md | 422 + docs/checks/column/text/text-max-length.md | 282 + docs/checks/column/text/text-mean-length.md | 282 + docs/checks/column/text/text-min-length.md | 282 + .../uniqueness/distinct-count-anomaly.md | 168 + .../uniqueness/distinct-count-change-1-day.md | 168 + .../distinct-count-change-30-days.md | 168 + .../distinct-count-change-7-days.md | 168 + .../uniqueness/distinct-count-change.md | 282 + .../column/uniqueness/distinct-count.md | 282 + .../uniqueness/distinct-percent-anomaly.md | 192 + .../distinct-percent-change-1-day.md | 
192 + .../distinct-percent-change-30-days.md | 192 + .../distinct-percent-change-7-days.md | 192 + .../uniqueness/distinct-percent-change.md | 322 + .../column/uniqueness/distinct-percent.md | 322 + .../column/uniqueness/duplicate-count.md | 262 + .../column/uniqueness/duplicate-percent.md | 342 + .../column/whitespace/empty-text-found.md | 382 + .../column/whitespace/empty-text-percent.md | 442 + .../whitespace/null-placeholder-text-found.md | 372 + .../null-placeholder-text-percent.md | 422 + .../text-surrounded-by-whitespace-found.md | 402 + .../text-surrounded-by-whitespace-percent.md | 472 + .../whitespace/whitespace-text-found.md | 402 + .../whitespace/whitespace-text-percent.md | 462 + .../accuracy/total-row-count-match-percent.md | 105 + .../table/availability/table-availability.md | 111 + .../table/comparisons/row-count-match.md | 118 + .../import-custom-result-on-table.md | 54 + .../sql-aggregate-expression-on-table.md | 252 + .../sql-condition-failed-on-table.md | 372 + .../sql-condition-passed-percent-on-table.md | 412 + .../timeliness/data-freshness-anomaly.md | 236 + .../checks/table/timeliness/data-freshness.md | 354 + .../table/timeliness/data-ingestion-delay.md | 792 ++ .../checks/table/timeliness/data-staleness.md | 354 + docs/checks/table/timeliness/reload-lag.md | 268 + .../uniqueness/duplicate-record-count.md | 540 + .../uniqueness/duplicate-record-percent.md | 540 + docs/checks/table/volume/row-count-anomaly.md | 144 + .../table/volume/row-count-change-1-day.md | 144 + .../table/volume/row-count-change-30-days.md | 144 + .../table/volume/row-count-change-7-days.md | 144 + docs/checks/table/volume/row-count-change.md | 242 + docs/checks/table/volume/row-count.md | 242 + docs/client/models/common.md | 3 +- docs/client/models/environment.md | 1 + docs/client/operations/environment.md | 15 +- docs/command-line-interface/connection.md | 30 +- docs/command-line-interface/sensor.md | 2 +- docs/reference/rules/Averages.md | 21 +- docs/reference/rules/Percentile.md | 246 +- .../column/accepted_values-column-sensors.md | 291 + .../sensors/column/accuracy-column-sensors.md | 110 + .../sensors/column/bool-column-sensors.md | 44 + .../column/conversions-column-sensors.md | 91 + .../column/custom_sql-column-sensors.md | 112 + .../sensors/column/datatype-column-sensors.md | 88 + .../sensors/column/datetime-column-sensors.md | 78 + .../column/integrity-column-sensors.md | 44 + .../sensors/column/nulls-column-sensors.md | 69 + .../sensors/column/numeric-column-sensors.md | 369 + .../sensors/column/patterns-column-sensors.md | 340 + .../sensors/column/pii-column-sensors.md | 120 + .../sensors/column/range-column-sensors.md | 26 + .../sensors/column/sampling-column-sensors.md | 28 + .../sensors/column/text-column-sensors.md | 180 + .../column/uniqueness-column-sensors.md | 64 + .../column/whitespace-column-sensors.md | 176 + .../sensors/table/accuracy-table-sensors.md | 22 + .../table/availability-table-sensors.md | 21 + .../sensors/table/custom_sql-table-sensors.md | 104 + .../sensors/table/timeliness-table-sensors.md | 196 + .../sensors/table/uniqueness-table-sensors.md | 60 + .../sensors/table/volume-table-sensors.md | 13 + docs/reference/yaml/ConnectionYaml.md | 21 +- docs/reference/yaml/SensorDefinitionYaml.md | 2 +- .../swagger-api/dqops-api-openapi-3.json | 9846 +++++++++-------- 203 files changed, 62719 insertions(+), 5841 deletions(-) create mode 100644 distribution/python/dqops/client/models/teradata_parameters_spec.py create mode 100644 
distribution/python/dqops/client/models/teradata_parameters_spec_properties.py diff --git a/distribution/python/dqops/client/models/__init__.py b/distribution/python/dqops/client/models/__init__.py index c9a9225a9b..de2a91d499 100644 --- a/distribution/python/dqops/client/models/__init__.py +++ b/distribution/python/dqops/client/models/__init__.py @@ -2105,6 +2105,8 @@ from .target_rule_severity_level import TargetRuleSeverityLevel from .target_table_pattern_spec import TargetTablePatternSpec from .temporal_unit import TemporalUnit +from .teradata_parameters_spec import TeradataParametersSpec +from .teradata_parameters_spec_properties import TeradataParametersSpecProperties from .text_built_in_date_formats import TextBuiltInDateFormats from .time_period_gradient import TimePeriodGradient from .time_window_filter_parameters import TimeWindowFilterParameters @@ -3101,6 +3103,8 @@ "TargetRuleSeverityLevel", "TargetTablePatternSpec", "TemporalUnit", + "TeradataParametersSpec", + "TeradataParametersSpecProperties", "TextBuiltInDateFormats", "TimePeriodGradient", "TimestampColumnsSpec", diff --git a/distribution/python/dqops/client/models/connection_model.py b/distribution/python/dqops/client/models/connection_model.py index 6912fd13f2..b55c3a1744 100644 --- a/distribution/python/dqops/client/models/connection_model.py +++ b/distribution/python/dqops/client/models/connection_model.py @@ -33,6 +33,7 @@ from ..models.statistics_collector_search_filters import ( StatisticsCollectorSearchFilters, ) + from ..models.teradata_parameters_spec import TeradataParametersSpec from ..models.trino_parameters_spec import TrinoParametersSpec @@ -70,6 +71,7 @@ class ConnectionModel: mariadb (Union[Unset, MariaDbParametersSpec]): clickhouse (Union[Unset, ClickHouseParametersSpec]): questdb (Union[Unset, QuestDbParametersSpec]): + teradata (Union[Unset, TeradataParametersSpec]): run_checks_job_template (Union[Unset, CheckSearchFilters]): Target data quality checks filter, identifies which checks on which tables and columns should be executed. 
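Since this patch wires `TeradataParametersSpec` through `ConnectionModel.to_dict()` and `from_dict()`, a short round-trip illustrates the new field. This is a minimal sketch, assuming the generated `dqops` client from this patch is installed; the connection name and credential placeholders are hypothetical, and `connection_name`/`provider_type` are pre-existing model fields that these hunks do not show.

```python
from dqops.client.models import (
    ConnectionModel,
    ProviderType,
    TeradataParametersSpec,
)

# Hypothetical connection pointing at a Teradata host. The `teradata` field
# and TeradataParametersSpec are the additions made by this patch.
connection = ConnectionModel(
    connection_name="teradata_dwh",
    provider_type=ProviderType.TERADATA,
    teradata=TeradataParametersSpec(
        host="${TERADATA_HOST}",  # resolved from an environment variable
        port="1025",              # default Teradata port, kept as a string
        user="${TERADATA_USER}",
        password="${TERADATA_PASSWORD}",
    ),
)

# The generated models serialize only the fields that were actually set.
payload = connection.to_dict()
assert payload["teradata"]["host"] == "${TERADATA_HOST}"

# from_dict() rebuilds the nested spec, mirroring the hunk above.
restored = ConnectionModel.from_dict(payload)
assert isinstance(restored.teradata, TeradataParametersSpec)
```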
run_profiling_checks_job_template (Union[Unset, CheckSearchFilters]): Target data quality checks filter, @@ -115,6 +117,7 @@ class ConnectionModel: mariadb: Union[Unset, "MariaDbParametersSpec"] = UNSET clickhouse: Union[Unset, "ClickHouseParametersSpec"] = UNSET questdb: Union[Unset, "QuestDbParametersSpec"] = UNSET + teradata: Union[Unset, "TeradataParametersSpec"] = UNSET run_checks_job_template: Union[Unset, "CheckSearchFilters"] = UNSET run_profiling_checks_job_template: Union[Unset, "CheckSearchFilters"] = UNSET run_monitoring_checks_job_template: Union[Unset, "CheckSearchFilters"] = UNSET @@ -208,6 +211,10 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.questdb, Unset): questdb = self.questdb.to_dict() + teradata: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.teradata, Unset): + teradata = self.teradata.to_dict() + run_checks_job_template: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.run_checks_job_template, Unset): run_checks_job_template = self.run_checks_job_template.to_dict() @@ -297,6 +304,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["clickhouse"] = clickhouse if questdb is not UNSET: field_dict["questdb"] = questdb + if teradata is not UNSET: + field_dict["teradata"] = teradata if run_checks_job_template is not UNSET: field_dict["run_checks_job_template"] = run_checks_job_template if run_profiling_checks_job_template is not UNSET: @@ -360,6 +369,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.statistics_collector_search_filters import ( StatisticsCollectorSearchFilters, ) + from ..models.teradata_parameters_spec import TeradataParametersSpec from ..models.trino_parameters_spec import TrinoParametersSpec d = src_dict.copy() @@ -497,6 +507,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: questdb = QuestDbParametersSpec.from_dict(_questdb) + _teradata = d.pop("teradata", UNSET) + teradata: Union[Unset, TeradataParametersSpec] + if isinstance(_teradata, Unset): + teradata = UNSET + else: + teradata = TeradataParametersSpec.from_dict(_teradata) + _run_checks_job_template = d.pop("run_checks_job_template", UNSET) run_checks_job_template: Union[Unset, CheckSearchFilters] if isinstance(_run_checks_job_template, Unset): @@ -603,6 +620,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: mariadb=mariadb, clickhouse=clickhouse, questdb=questdb, + teradata=teradata, run_checks_job_template=run_checks_job_template, run_profiling_checks_job_template=run_profiling_checks_job_template, run_monitoring_checks_job_template=run_monitoring_checks_job_template, diff --git a/distribution/python/dqops/client/models/connection_spec.py b/distribution/python/dqops/client/models/connection_spec.py index 4f84c36479..2425d6c1f6 100644 --- a/distribution/python/dqops/client/models/connection_spec.py +++ b/distribution/python/dqops/client/models/connection_spec.py @@ -33,6 +33,7 @@ from ..models.snowflake_parameters_spec import SnowflakeParametersSpec from ..models.spark_parameters_spec import SparkParametersSpec from ..models.sql_server_parameters_spec import SqlServerParametersSpec + from ..models.teradata_parameters_spec import TeradataParametersSpec from ..models.trino_parameters_spec import TrinoParametersSpec @@ -61,6 +62,7 @@ class ConnectionSpec: mariadb (Union[Unset, MariaDbParametersSpec]): clickhouse (Union[Unset, ClickHouseParametersSpec]): questdb (Union[Unset, QuestDbParametersSpec]): + teradata (Union[Unset, TeradataParametersSpec]): parallel_jobs_limit (Union[Unset, int]): 
The concurrency limit for the maximum number of parallel SQL queries executed on this connection. default_grouping_configuration (Union[Unset, DataGroupingConfigurationSpec]): @@ -98,6 +100,7 @@ class ConnectionSpec: mariadb: Union[Unset, "MariaDbParametersSpec"] = UNSET clickhouse: Union[Unset, "ClickHouseParametersSpec"] = UNSET questdb: Union[Unset, "QuestDbParametersSpec"] = UNSET + teradata: Union[Unset, "TeradataParametersSpec"] = UNSET parallel_jobs_limit: Union[Unset, int] = UNSET default_grouping_configuration: Union[Unset, "DataGroupingConfigurationSpec"] = ( UNSET @@ -184,6 +187,10 @@ def to_dict(self) -> Dict[str, Any]: if not isinstance(self.questdb, Unset): questdb = self.questdb.to_dict() + teradata: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.teradata, Unset): + teradata = self.teradata.to_dict() + parallel_jobs_limit = self.parallel_jobs_limit default_grouping_configuration: Union[Unset, Dict[str, Any]] = UNSET if not isinstance(self.default_grouping_configuration, Unset): @@ -259,6 +266,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["clickhouse"] = clickhouse if questdb is not UNSET: field_dict["questdb"] = questdb + if teradata is not UNSET: + field_dict["teradata"] = teradata if parallel_jobs_limit is not UNSET: field_dict["parallel_jobs_limit"] = parallel_jobs_limit if default_grouping_configuration is not UNSET: @@ -312,6 +321,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: from ..models.snowflake_parameters_spec import SnowflakeParametersSpec from ..models.spark_parameters_spec import SparkParametersSpec from ..models.sql_server_parameters_spec import SqlServerParametersSpec + from ..models.teradata_parameters_spec import TeradataParametersSpec from ..models.trino_parameters_spec import TrinoParametersSpec d = src_dict.copy() @@ -441,6 +451,13 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: else: questdb = QuestDbParametersSpec.from_dict(_questdb) + _teradata = d.pop("teradata", UNSET) + teradata: Union[Unset, TeradataParametersSpec] + if isinstance(_teradata, Unset): + teradata = UNSET + else: + teradata = TeradataParametersSpec.from_dict(_teradata) + parallel_jobs_limit = d.pop("parallel_jobs_limit", UNSET) _default_grouping_configuration = d.pop("default_grouping_configuration", UNSET) @@ -514,6 +531,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: mariadb=mariadb, clickhouse=clickhouse, questdb=questdb, + teradata=teradata, parallel_jobs_limit=parallel_jobs_limit, default_grouping_configuration=default_grouping_configuration, schedules=schedules, diff --git a/distribution/python/dqops/client/models/display_hint.py b/distribution/python/dqops/client/models/display_hint.py index 5892d2d88c..18683a8ab7 100644 --- a/distribution/python/dqops/client/models/display_hint.py +++ b/distribution/python/dqops/client/models/display_hint.py @@ -3,6 +3,7 @@ class DisplayHint(str, Enum): COLUMN_NAMES = "column_names" + REQUIRES_PAID_VERSION = "requires_paid_version" TEXTAREA = "textarea" def __str__(self) -> str: diff --git a/distribution/python/dqops/client/models/dqo_user_profile_model.py b/distribution/python/dqops/client/models/dqo_user_profile_model.py index 96676d188e..6fe5d2986e 100644 --- a/distribution/python/dqops/client/models/dqo_user_profile_model.py +++ b/distribution/python/dqops/client/models/dqo_user_profile_model.py @@ -62,6 +62,8 @@ class DqoUserProfileModel: ENTERPRISE license of DQOps. can_synchronize_to_data_catalog (Union[Unset, bool]): User can synchronize data to a data catalog. 
The instance must be configured correctly and the user must have at least an EDITOR role. + can_use_ai_anomaly_detection (Union[Unset, bool]): The DQOps instance is a paid version with advanced AI anomaly + prediction. """ user: Union[Unset, str] = UNSET @@ -95,6 +97,7 @@ class DqoUserProfileModel: can_change_own_password: Union[Unset, bool] = UNSET can_use_data_domains: Union[Unset, bool] = UNSET can_synchronize_to_data_catalog: Union[Unset, bool] = UNSET + can_use_ai_anomaly_detection: Union[Unset, bool] = UNSET additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> Dict[str, Any]: @@ -134,6 +137,7 @@ def to_dict(self) -> Dict[str, Any]: can_change_own_password = self.can_change_own_password can_use_data_domains = self.can_use_data_domains can_synchronize_to_data_catalog = self.can_synchronize_to_data_catalog + can_use_ai_anomaly_detection = self.can_use_ai_anomaly_detection field_dict: Dict[str, Any] = {} field_dict.update(self.additional_properties) @@ -206,6 +210,8 @@ def to_dict(self) -> Dict[str, Any]: field_dict["can_synchronize_to_data_catalog"] = ( can_synchronize_to_data_catalog ) + if can_use_ai_anomaly_detection is not UNSET: + field_dict["can_use_ai_anomaly_detection"] = can_use_ai_anomaly_detection return field_dict @@ -285,6 +291,8 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: "can_synchronize_to_data_catalog", UNSET ) + can_use_ai_anomaly_detection = d.pop("can_use_ai_anomaly_detection", UNSET) + dqo_user_profile_model = cls( user=user, tenant=tenant, @@ -317,6 +325,7 @@ def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: can_change_own_password=can_change_own_password, can_use_data_domains=can_use_data_domains, can_synchronize_to_data_catalog=can_synchronize_to_data_catalog, + can_use_ai_anomaly_detection=can_use_ai_anomaly_detection, ) dqo_user_profile_model.additional_properties = d diff --git a/distribution/python/dqops/client/models/provider_type.py b/distribution/python/dqops/client/models/provider_type.py index 09acc7c134..177b8dd000 100644 --- a/distribution/python/dqops/client/models/provider_type.py +++ b/distribution/python/dqops/client/models/provider_type.py @@ -18,6 +18,7 @@ class ProviderType(str, Enum): SNOWFLAKE = "snowflake" SPARK = "spark" SQLSERVER = "sqlserver" + TERADATA = "teradata" TRINO = "trino" def __str__(self) -> str: diff --git a/distribution/python/dqops/client/models/teradata_parameters_spec.py b/distribution/python/dqops/client/models/teradata_parameters_spec.py new file mode 100644 index 0000000000..11a7e0eea8 --- /dev/null +++ b/distribution/python/dqops/client/models/teradata_parameters_spec.py @@ -0,0 +1,121 @@ +from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.teradata_parameters_spec_properties import ( + TeradataParametersSpecProperties, + ) + + +T = TypeVar("T", bound="TeradataParametersSpec") + + +@_attrs_define +class TeradataParametersSpec: + """ + Attributes: + host (Union[Unset, str]): Teradata host name. Supports also a ${TERADATA_HOST} configuration with a custom + environment variable. + port (Union[Unset, str]): Teradata port number. The default port is 1025. Supports also a ${TERADATA_PORT} + configuration with a custom environment variable. + user (Union[Unset, str]): Teradata user name. 
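Alongside the connector itself, the patch adds a `can_use_ai_anomaly_detection` flag to `DqoUserProfileModel` and a `TERADATA` member to `ProviderType`. Below is a sketch of gating logic on the new flag; how the profile is fetched is out of scope, so the payload is hypothetical. The client's `UNSET` sentinel is falsy, which makes the truthiness test safe for unreported capabilities.

```python
from dqops.client.models import DqoUserProfileModel, ProviderType

def supports_ai_anomaly_detection(profile: DqoUserProfileModel) -> bool:
    # can_use_ai_anomaly_detection is Union[Unset, bool]; UNSET is falsy,
    # so an unreported capability is treated as unavailable.
    return bool(profile.can_use_ai_anomaly_detection)

# Hypothetical API payload exercising the new field.
profile = DqoUserProfileModel.from_dict({"can_use_ai_anomaly_detection": True})
assert supports_ai_anomaly_detection(profile)

# The new provider enum member renders as its wire value.
assert str(ProviderType.TERADATA) == "teradata"
```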
The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use + dynamic substitution. + password (Union[Unset, str]): Teradata database password. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} + format to use dynamic substitution. + properties (Union[Unset, TeradataParametersSpecProperties]): A dictionary of custom JDBC parameters that are + added to the JDBC connection string, a key/value dictionary. + database (Union[Unset, str]): + """ + + host: Union[Unset, str] = UNSET + port: Union[Unset, str] = UNSET + user: Union[Unset, str] = UNSET + password: Union[Unset, str] = UNSET + properties: Union[Unset, "TeradataParametersSpecProperties"] = UNSET + database: Union[Unset, str] = UNSET + additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + host = self.host + port = self.port + user = self.user + password = self.password + properties: Union[Unset, Dict[str, Any]] = UNSET + if not isinstance(self.properties, Unset): + properties = self.properties.to_dict() + + database = self.database + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + if host is not UNSET: + field_dict["host"] = host + if port is not UNSET: + field_dict["port"] = port + if user is not UNSET: + field_dict["user"] = user + if password is not UNSET: + field_dict["password"] = password + if properties is not UNSET: + field_dict["properties"] = properties + if database is not UNSET: + field_dict["database"] = database + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + from ..models.teradata_parameters_spec_properties import ( + TeradataParametersSpecProperties, + ) + + d = src_dict.copy() + host = d.pop("host", UNSET) + + port = d.pop("port", UNSET) + + user = d.pop("user", UNSET) + + password = d.pop("password", UNSET) + + _properties = d.pop("properties", UNSET) + properties: Union[Unset, TeradataParametersSpecProperties] + if isinstance(_properties, Unset): + properties = UNSET + else: + properties = TeradataParametersSpecProperties.from_dict(_properties) + + database = d.pop("database", UNSET) + + teradata_parameters_spec = cls( + host=host, + port=port, + user=user, + password=password, + properties=properties, + database=database, + ) + + teradata_parameters_spec.additional_properties = d + return teradata_parameters_spec + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/distribution/python/dqops/client/models/teradata_parameters_spec_properties.py b/distribution/python/dqops/client/models/teradata_parameters_spec_properties.py new file mode 100644 index 0000000000..280dfa0cf1 --- /dev/null +++ b/distribution/python/dqops/client/models/teradata_parameters_spec_properties.py @@ -0,0 +1,45 @@ +from typing import Any, Dict, List, Type, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +T = TypeVar("T", bound="TeradataParametersSpecProperties") + + +@_attrs_define +class TeradataParametersSpecProperties: + """A dictionary of custom JDBC parameters that are added to the JDBC 
connection string, a key/value dictionary.""" + + additional_properties: Dict[str, str] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> Dict[str, Any]: + + field_dict: Dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update({}) + + return field_dict + + @classmethod + def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T: + d = src_dict.copy() + teradata_parameters_spec_properties = cls() + + teradata_parameters_spec_properties.additional_properties = d + return teradata_parameters_spec_properties + + @property + def additional_keys(self) -> List[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> str: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: str) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/docs/checks/column/accepted_values/expected-numbers-in-use-count.md b/docs/checks/column/accepted_values/expected-numbers-in-use-count.md index 5b0cc4714a..1c4bffac84 100644 --- a/docs/checks/column/accepted_values/expected-numbers-in-use-count.md +++ b/docs/checks/column/accepted_values/expected-numbers-in-use-count.md @@ -1019,6 +1019,55 @@ spec: MAX(2) AS expected_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN analyzed_table."target_column" + ELSE NULL + END + ) AS actual_value, + MAX(2) AS expected_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2071,6 +2120,57 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
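The `TeradataParametersSpecProperties` class above is a thin mapping wrapper, so custom JDBC options attach through plain item assignment. A minimal sketch, assuming the two classes from this patch; the `CHARSET`/`TMODE` names are illustrative Teradata JDBC driver options rather than values mandated by DQOps, and the database name is hypothetical.

```python
from dqops.client.models import (
    TeradataParametersSpec,
    TeradataParametersSpecProperties,
)

# Custom JDBC parameters live in additional_properties and are exposed
# through the mapping dunders defined above.
jdbc_properties = TeradataParametersSpecProperties()
jdbc_properties["CHARSET"] = "UTF8"  # illustrative driver option
jdbc_properties["TMODE"] = "ANSI"    # illustrative driver option

spec = TeradataParametersSpec(
    host="${TERADATA_HOST}",
    database="dq_checks",            # hypothetical database name
    properties=jdbc_properties,
)

# Properties serialize as a flat key/value dictionary on the wire.
assert spec.to_dict()["properties"] == {"CHARSET": "UTF8", "TMODE": "ANSI"}
```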
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN analyzed_table."target_column" + ELSE NULL + END + ) AS actual_value, + MAX(2) AS expected_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3149,6 +3249,55 @@ spec: MAX(2) AS expected_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN analyzed_table."target_column" + ELSE NULL + END + ) AS actual_value, + MAX(2) AS expected_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4202,6 +4351,57 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN analyzed_table."target_column" + ELSE NULL + END + ) AS actual_value, + MAX(2) AS expected_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5280,6 +5480,55 @@ spec: MAX(2) AS expected_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN analyzed_table."target_column" + ELSE NULL + END + ) AS actual_value, + MAX(2) AS expected_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6333,6 +6582,57 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN analyzed_table."target_column" + ELSE NULL + END + ) AS actual_value, + MAX(2) AS expected_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -7496,6 +7796,59 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN analyzed_table."target_column" + ELSE NULL + END + ) AS actual_value, + MAX(2) AS expected_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8606,6 +8959,59 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN analyzed_table."target_column" + ELSE NULL + END + ) AS actual_value, + MAX(2) AS expected_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -9773,6 +10179,59 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN analyzed_table."target_column" + ELSE NULL + END + ) AS actual_value, + MAX(2) AS expected_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -10883,6 +11342,59 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN analyzed_table."target_column" + ELSE NULL + END + ) AS actual_value, + MAX(2) AS expected_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/accepted_values/expected-text-values-in-use-count.md b/docs/checks/column/accepted_values/expected-text-values-in-use-count.md index aabb124902..0f3873b190 100644 --- a/docs/checks/column/accepted_values/expected-text-values-in-use-count.md +++ b/docs/checks/column/accepted_values/expected-text-values-in-use-count.md @@ -1129,6 +1129,67 @@ spec: MAX(3) AS expected_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN ('USD','GBP','EUR') + THEN analyzed_table."target_column" + ELSE NULL + END + ) + END AS actual_value, + MAX(CAST(3 AS INT)) AS expected_value_alias + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2297,6 +2358,69 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN ('USD','GBP','EUR') + THEN analyzed_table."target_column" + ELSE NULL + END + ) + END AS actual_value, + MAX(CAST(3 AS INT)) AS expected_value_alias, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? 
example "Trino" @@ -3491,6 +3615,67 @@ spec: MAX(3) AS expected_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN ('USD','GBP','EUR') + THEN analyzed_table."target_column" + ELSE NULL + END + ) + END AS actual_value, + MAX(CAST(3 AS INT)) AS expected_value_alias + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4660,6 +4845,69 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN ('USD','GBP','EUR') + THEN analyzed_table."target_column" + ELSE NULL + END + ) + END AS actual_value, + MAX(CAST(3 AS INT)) AS expected_value_alias, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5854,6 +6102,67 @@ spec: MAX(3) AS expected_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN ('USD','GBP','EUR') + THEN analyzed_table."target_column" + ELSE NULL + END + ) + END AS actual_value, + MAX(CAST(3 AS INT)) AS expected_value_alias + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -7013,16 +7322,79 @@ Expand the *Configure with data grouping* section to see additional examples for END ) END AS actual_value, - MAX(3) AS expected_value, - analyzed_table.[country] AS grouping_level_1, - analyzed_table.[state] AS grouping_level_2 - FROM [your_sql_server_database].[].[] AS analyzed_table - GROUP BY analyzed_table.[country], analyzed_table.[state] - ORDER BY level_1, level_2 - , - - - + MAX(3) AS expected_value, + analyzed_table.[country] AS grouping_level_1, + analyzed_table.[state] AS grouping_level_2 + FROM [your_sql_server_database].[].[] AS analyzed_table + GROUP BY analyzed_table.[country], analyzed_table.[state] + ORDER BY level_1, level_2 + , + + + + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN ('USD','GBP','EUR') + THEN analyzed_table."target_column" + ELSE NULL + END + ) + END AS actual_value, + MAX(CAST(3 AS INT)) AS expected_value_alias, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -8302,6 +8674,71 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN ('USD','GBP','EUR') + THEN analyzed_table."target_column" + ELSE NULL + END + ) + END AS actual_value, + MAX(CAST(3 AS INT)) AS expected_value_alias, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9528,6 +9965,71 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN ('USD','GBP','EUR') + THEN analyzed_table."target_column" + ELSE NULL + END + ) + END AS actual_value, + MAX(CAST(3 AS INT)) AS expected_value_alias, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -10811,6 +11313,71 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN ('USD','GBP','EUR') + THEN analyzed_table."target_column" + ELSE NULL + END + ) + END AS actual_value, + MAX(CAST(3 AS INT)) AS expected_value_alias, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -12037,6 +12604,71 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE COUNT(DISTINCT + CASE + WHEN analyzed_table."target_column" IN ('USD','GBP','EUR') + THEN analyzed_table."target_column" + ELSE NULL + END + ) + END AS actual_value, + MAX(CAST(3 AS INT)) AS expected_value_alias, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/accepted_values/expected-texts-in-top-values-count.md b/docs/checks/column/accepted_values/expected-texts-in-top-values-count.md index 38731307db..2ed60621c5 100644 --- a/docs/checks/column/accepted_values/expected-texts-in-top-values-count.md +++ b/docs/checks/column/accepted_values/expected-texts-in-top-values-count.md @@ -2289,6 +2289,134 @@ spec: ) AS top_values WHERE top_values_rank <= 3 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif data_grouping_level.source == 'column_value' -%} + {{ indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + MAX(1 + NULL) AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + 
{{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX(3) AS expected_value + FROM + ( + SELECT + top_col_values.top_value as top_value, + RANK() OVER(PARTITION BY NULL + ORDER BY top_col_values.total_values DESC) as top_values_rank + FROM + ( + SELECT + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values + FROM + ""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= 3 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4715,6 +4843,138 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS top_values WHERE top_values_rank <= 3GROUP BY top_values.grouping_level_1top_values.grouping_level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif data_grouping_level.source == 'column_value' -%} + {{ 
indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + MAX(1 + NULL) AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX(3) AS expected_value, + top_values.grouping_level_1, + top_values.grouping_level_2 + FROM + ( + SELECT + top_col_values.top_value as top_value, + RANK() OVER(PARTITION BY top_col_values.grouping_level_1, top_col_values.grouping_level_2 + ORDER BY top_col_values.total_values DESC) as top_values_rank, top_col_values.grouping_level_1, top_col_values.grouping_level_2 + FROM + ( + SELECT + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM + ""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY grouping_level_1, grouping_level_2, top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= 3 + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -7145,13 +7405,141 @@ spec: ) AS top_values WHERE top_values_rank <= 3 ``` - ??? example "Trino" + ??? 
example "Teradata" - === "Sensor template for Trino" + === "Sensor template for Teradata" ```sql+jinja - {% import '/dialects/trino.sql.jinja2' as lib with context -%} - + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif data_grouping_level.source == 'column_value' -%} + {{ indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + MAX(1 + NULL) AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- 
render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX(3) AS expected_value + FROM + ( + SELECT + top_col_values.top_value as top_value, + RANK() OVER(PARTITION BY NULL + ORDER BY top_col_values.total_values DESC) as top_values_rank + FROM + ( + SELECT + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values + FROM + ""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= 3 + ``` + ??? example "Trino" + + === "Sensor template for Trino" + + ```sql+jinja + {% import '/dialects/trino.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} {%- for i in values_list -%} {%- if not loop.last -%} @@ -9572,6 +9960,138 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS top_values WHERE top_values_rank <= 3GROUP BY top_values.grouping_level_1top_values.grouping_level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and 
lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif data_grouping_level.source == 'column_value' -%} + {{ indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + MAX(1 + NULL) AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX(3) AS expected_value, + top_values.grouping_level_1, + top_values.grouping_level_2 + FROM + ( + SELECT + top_col_values.top_value as top_value, + RANK() OVER(PARTITION BY top_col_values.grouping_level_1, top_col_values.grouping_level_2 + ORDER BY top_col_values.total_values DESC) as top_values_rank, top_col_values.grouping_level_1, top_col_values.grouping_level_2 + FROM + ( + SELECT + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM + ""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY grouping_level_1, grouping_level_2, top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= 3 + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -12002,6 +12522,134 @@ spec: ) AS top_values WHERE top_values_rank <= 3 ``` + ??? 
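example "Note: ranking within data groups"
+
+        In the variants with data grouping shown above, the grouping columns are added to the `PARTITION BY` clause of the `RANK()` window, so the top-N ranking is computed independently inside every group. Because `RANK()` gives tied frequencies the same rank, a group can return more than `top` candidate values when counts tie. A minimal sketch with hypothetical names:
+
+        ```sql
+        -- Top 3 most frequent currencies per country.
+        SELECT ranked.country, ranked.top_value
+        FROM (
+            SELECT
+                freq.country,
+                freq.top_value,
+                RANK() OVER (PARTITION BY freq.country
+                             ORDER BY freq.total_values DESC) AS top_values_rank
+            FROM (
+                SELECT
+                    t.country,
+                    t.currency AS top_value,
+                    COUNT(*)   AS total_values
+                FROM payments AS t
+                GROUP BY t.country, t.currency
+            ) AS freq
+        ) AS ranked
+        WHERE ranked.top_values_rank <= 3
+        ```
+
+    ??? 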
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif data_grouping_level.source == 'column_value' -%} + {{ indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + MAX(1 + NULL) AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + 
{{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX(3) AS expected_value + FROM + ( + SELECT + top_col_values.top_value as top_value, + RANK() OVER(PARTITION BY NULL + ORDER BY top_col_values.total_values DESC) as top_values_rank + FROM + ( + SELECT + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values + FROM + ""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= 3 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -14429,6 +15077,138 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS top_values WHERE top_values_rank <= 3GROUP BY top_values.grouping_level_1top_values.grouping_level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif data_grouping_level.source == 'column_value' -%} + {{ 
indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + MAX(1 + NULL) AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX(3) AS expected_value, + top_values.grouping_level_1, + top_values.grouping_level_2 + FROM + ( + SELECT + top_col_values.top_value as top_value, + RANK() OVER(PARTITION BY top_col_values.grouping_level_1, top_col_values.grouping_level_2 + ORDER BY top_col_values.total_values DESC) as top_values_rank, top_col_values.grouping_level_1, top_col_values.grouping_level_2 + FROM + ( + SELECT + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM + ""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY grouping_level_1, grouping_level_2, top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= 3 + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -17003,13 +17783,149 @@ spec: ) AS top_values WHERE top_values_rank <= 3GROUP BY time_period, time_period_utc ``` - ??? example "Trino" + ??? 
example "Teradata" - === "Sensor template for Trino" + === "Sensor template for Teradata" ```sql+jinja - {% import '/dialects/trino.sql.jinja2' as lib with context -%} - + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif data_grouping_level.source == 'column_value' -%} + {{ indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + MAX(1 + NULL) AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- 
render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX(3) AS expected_value, + top_values.time_period, + top_values.time_period_utc + FROM + ( + SELECT + top_col_values.top_value as top_value, + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + RANK() OVER(PARTITION BY top_col_values.time_period + ORDER BY top_col_values.total_values DESC) as top_values_rank + FROM + ( + SELECT + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM + ""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY time_period, time_period_utc, top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= 3 + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` + ??? example "Trino" + + === "Sensor template for Trino" + + ```sql+jinja + {% import '/dialects/trino.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} {%- for i in values_list -%} {%- if not loop.last -%} @@ -19554,6 +20470,144 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS top_values WHERE top_values_rank <= 3GROUP BY time_period, time_period_utc, top_values.grouping_level_1top_values.grouping_level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro 
render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif data_grouping_level.source == 'column_value' -%} + {{ indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + MAX(1 + NULL) AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX(3) AS expected_value, + top_values.time_period, + top_values.time_period_utc, + top_values.grouping_level_1, + top_values.grouping_level_2 + FROM + ( + SELECT + top_col_values.top_value as top_value, + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + RANK() OVER(PARTITION BY top_col_values.time_period, top_col_values.grouping_level_1, top_col_values.grouping_level_2 + ORDER BY top_col_values.total_values DESC) as top_values_rank, top_col_values.grouping_level_1, top_col_values.grouping_level_2 + FROM + ( + SELECT + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM + ""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc, top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= 3 + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -21580,10 +22634,149 @@ spec: ``` ??? 
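example "Note: ranking within time periods"
+
+        For partitioned checks, `top_col_values.time_period` is added to the `PARTITION BY` clause of the ranking window, so the top-N values are determined separately for each daily or monthly slice, and the outer query then groups the match counts by the same period. A minimal sketch with hypothetical names:
+
+        ```sql
+        -- Top 3 most frequent currencies within each day.
+        SELECT ranked.time_period, ranked.top_value
+        FROM (
+            SELECT
+                freq.time_period,
+                freq.top_value,
+                RANK() OVER (PARTITION BY freq.time_period
+                             ORDER BY freq.total_values DESC) AS top_values_rank
+            FROM (
+                SELECT
+                    CAST(t.created_at AS DATE) AS time_period,
+                    t.currency AS top_value,
+                    COUNT(*)   AS total_values
+                FROM payments AS t
+                GROUP BY CAST(t.created_at AS DATE), t.currency
+            ) AS freq
+        ) AS ranked
+        WHERE ranked.top_values_rank <= 3
+        ```
+
    ??? 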
example "Redshift" - === "Sensor template for Redshift" + === "Sensor template for Redshift" + + ```sql+jinja + {% import '/dialects/redshift.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ORDER BY {{ render_grouping_columns() -}} total_values DESC + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif data_grouping_level.source == 'column_value' -%} + {{ indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + NULL AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- 
render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Redshift" + + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX(3) AS expected_value, + top_values.time_period, + top_values.time_period_utc + FROM + ( + SELECT + top_col_values.top_value as top_value, + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + RANK() OVER(PARTITION BY top_col_values.time_period + ORDER BY top_col_values.total_values DESC) as top_values_rank + FROM + ( + SELECT + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values, + DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date)) AS time_period, + CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc + FROM + "your_redshift_database".""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY time_period, time_period_utc, top_value + ORDER BY time_period, time_period_utc, total_values DESC + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= 3 + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` + ??? example "Snowflake" + + === "Sensor template for Snowflake" ```sql+jinja - {% import '/dialects/redshift.sql.jinja2' as lib with context -%} + {% import '/dialects/snowflake.sql.jinja2' as lib with context -%} {%- macro extract_in_list(values_list) -%} {%- for i in values_list -%} @@ -21678,7 +22871,7 @@ spec: {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` - === "Rendered SQL for Redshift" + === "Rendered SQL for Snowflake" ```sql SELECT @@ -21705,9 +22898,9 @@ spec: analyzed_table."target_column" AS top_value, COUNT(*) AS total_values, DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date)) AS time_period, - CAST((DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date))) AS TIMESTAMP WITH TIME ZONE) AS time_period_utc + TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date))) AS time_period_utc FROM - "your_redshift_database".""."" AS analyzed_table + "your_snowflake_database".""."" AS analyzed_table WHERE (analyzed_table."target_column" IS NOT NULL) GROUP BY time_period, time_period_utc, top_value ORDER BY time_period, time_period_utc, total_values DESC @@ -21717,13 +22910,12 @@ spec: GROUP BY time_period, time_period_utc ORDER BY time_period, time_period_utc ``` - ??? example "Snowflake" + ??? 
example "Spark" - === "Sensor template for Snowflake" + === "Sensor template for Spark" ```sql+jinja - {% import '/dialects/snowflake.sql.jinja2' as lib with context -%} - + {% import '/dialects/spark.sql.jinja2' as lib with context -%} {%- macro extract_in_list(values_list) -%} {%- for i in values_list -%} {%- if not loop.last -%} @@ -21794,7 +22986,7 @@ spec: SELECT {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} - NULL AS actual_value, + MAX(1 + NULL) AS actual_value, MAX(0) AS expected_value {{- lib.render_data_grouping_projections('analyzed_table') }} {{- lib.render_time_dimension_projection('analyzed_table') }} @@ -21817,7 +23009,7 @@ spec: {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` - === "Rendered SQL for Snowflake" + === "Rendered SQL for Spark" ```sql SELECT @@ -21841,13 +23033,13 @@ spec: FROM ( SELECT - analyzed_table."target_column" AS top_value, + analyzed_table.`target_column` AS top_value, COUNT(*) AS total_values, - DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date)) AS time_period, - TO_TIMESTAMP(DATE_TRUNC('MONTH', CAST(analyzed_table."date_column" AS date))) AS time_period_utc + DATE_TRUNC('MONTH', CAST(analyzed_table.`date_column` AS DATE)) AS time_period, + TIMESTAMP(DATE_TRUNC('MONTH', CAST(analyzed_table.`date_column` AS DATE))) AS time_period_utc FROM - "your_snowflake_database".""."" AS analyzed_table - WHERE (analyzed_table."target_column" IS NOT NULL) + ``.`` AS analyzed_table + WHERE (analyzed_table.`target_column` IS NOT NULL) GROUP BY time_period, time_period_utc, top_value ORDER BY time_period, time_period_utc, total_values DESC ) AS top_col_values @@ -21856,12 +23048,13 @@ spec: GROUP BY time_period, time_period_utc ORDER BY time_period, time_period_utc ``` - ??? example "Spark" + ??? 
example "SQL Server" - === "Sensor template for Spark" + === "Sensor template for SQL Server" ```sql+jinja - {% import '/dialects/spark.sql.jinja2' as lib with context -%} + {% import '/dialects/sqlserver.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} {%- for i in values_list -%} {%- if not loop.last -%} @@ -21887,14 +23080,13 @@ spec: ( SELECT {{ lib.render_target_column('analyzed_table') }} AS top_value, - COUNT(*) AS total_values + COUNT_BIG(*) AS total_values {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} - GROUP BY {{ render_grouping_columns() -}} top_value - ORDER BY {{ render_grouping_columns() -}} total_values DESC + GROUP BY {{ render_grouping_columns() -}} {{ lib.render_target_column('analyzed_table') }} ) AS top_col_values ) AS top_values WHERE top_values_rank <= {{ parameters.top }} @@ -21932,13 +23124,13 @@ spec: SELECT {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} - MAX(1 + NULL) AS actual_value, + NULL AS actual_value, MAX(0) AS expected_value {{- lib.render_data_grouping_projections('analyzed_table') }} {{- lib.render_time_dimension_projection('analyzed_table') }} FROM {{ lib.render_target_table() }} AS analyzed_table {%- else %} - COUNT(DISTINCT + COUNT_BIG(DISTINCT CASE WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value ELSE NULL @@ -21952,14 +23144,21 @@ spec: {{- render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} {{ render_from_subquery() }} {%- endif -%} - {{- lib.render_group_by() -}} - {{- lib.render_order_by() -}} + {% if lib.time_series is not none -%} + GROUP BY time_period, time_period_utc + {%- endif -%} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length) > 0) -%} + {% if lib.time_series is none %}GROUP BY {% endif -%} + {%- for attribute in lib.data_groupings -%} + {{ ', ' if lib.time_series is not none and loop.index == 1 else "" }}top_values.grouping_{{ attribute }} + {%- endfor -%} + {%- endif -%} ``` - === "Rendered SQL for Spark" + === "Rendered SQL for SQL Server" ```sql SELECT - COUNT(DISTINCT + COUNT_BIG(DISTINCT CASE WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value ELSE NULL @@ -21979,28 +23178,24 @@ spec: FROM ( SELECT - analyzed_table.`target_column` AS top_value, - COUNT(*) AS total_values, - DATE_TRUNC('MONTH', CAST(analyzed_table.`date_column` AS DATE)) AS time_period, - TIMESTAMP(DATE_TRUNC('MONTH', CAST(analyzed_table.`date_column` AS DATE))) AS time_period_utc + analyzed_table.[target_column] AS top_value, + COUNT_BIG(*) AS total_values, + DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) AS time_period, + CAST((DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)) AS DATETIME) AS time_period_utc FROM - ``.`` AS analyzed_table - WHERE (analyzed_table.`target_column` IS NOT NULL) - GROUP BY time_period, time_period_utc, top_value - ORDER BY time_period, time_period_utc, total_values DESC + [your_sql_server_database].[].[] AS analyzed_table + WHERE (analyzed_table.[target_column] IS NOT NULL) + GROUP BY 
DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1), DATEADD(month, DATEDIFF(month, 0, analyzed_table.[date_column]), 0), analyzed_table.[target_column] ) AS top_col_values ) AS top_values - WHERE top_values_rank <= 3 - GROUP BY time_period, time_period_utc - ORDER BY time_period, time_period_utc + WHERE top_values_rank <= 3GROUP BY time_period, time_period_utc ``` - ??? example "SQL Server" + ??? example "Teradata" - === "Sensor template for SQL Server" + === "Sensor template for Teradata" ```sql+jinja - {% import '/dialects/sqlserver.sql.jinja2' as lib with context -%} - + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} {%- macro extract_in_list(values_list) -%} {%- for i in values_list -%} {%- if not loop.last -%} @@ -22026,13 +23221,13 @@ spec: ( SELECT {{ lib.render_target_column('analyzed_table') }} AS top_value, - COUNT_BIG(*) AS total_values + COUNT(*) AS total_values {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} - GROUP BY {{ render_grouping_columns() -}} {{ lib.render_target_column('analyzed_table') }} + GROUP BY {{ render_grouping_columns() -}} top_value ) AS top_col_values ) AS top_values WHERE top_values_rank <= {{ parameters.top }} @@ -22070,13 +23265,13 @@ spec: SELECT {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} - NULL AS actual_value, + MAX(1 + NULL) AS actual_value, MAX(0) AS expected_value {{- lib.render_data_grouping_projections('analyzed_table') }} {{- lib.render_time_dimension_projection('analyzed_table') }} FROM {{ lib.render_target_table() }} AS analyzed_table {%- else %} - COUNT_BIG(DISTINCT + COUNT(DISTINCT CASE WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value ELSE NULL @@ -22090,21 +23285,14 @@ spec: {{- render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} {{ render_from_subquery() }} {%- endif -%} - {% if lib.time_series is not none -%} - GROUP BY time_period, time_period_utc - {%- endif -%} - {%- if (lib.data_groupings is not none and (lib.data_groupings | length) > 0) -%} - {% if lib.time_series is none %}GROUP BY {% endif -%} - {%- for attribute in lib.data_groupings -%} - {{ ', ' if lib.time_series is not none and loop.index == 1 else "" }}top_values.grouping_{{ attribute }} - {%- endfor -%} - {%- endif -%} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} ``` - === "Rendered SQL for SQL Server" + === "Rendered SQL for Teradata" ```sql SELECT - COUNT_BIG(DISTINCT + COUNT(DISTINCT CASE WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value ELSE NULL @@ -22124,17 +23312,19 @@ spec: FROM ( SELECT - analyzed_table.[target_column] AS top_value, - COUNT_BIG(*) AS total_values, - DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) AS time_period, - CAST((DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)) AS DATETIME) AS time_period_utc + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + 
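/* TRUNC(date, 'MM') truncates to the first day of the month; the TIMESTAMP cast derives the UTC time-period column */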
CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc FROM - [your_sql_server_database].[].[] AS analyzed_table - WHERE (analyzed_table.[target_column] IS NOT NULL) - GROUP BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1), DATEADD(month, DATEDIFF(month, 0, analyzed_table.[date_column]), 0), analyzed_table.[target_column] + ""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY time_period, time_period_utc, top_value ) AS top_col_values ) AS top_values - WHERE top_values_rank <= 3GROUP BY time_period, time_period_utc + WHERE top_values_rank <= 3 + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -24687,6 +25877,144 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS top_values WHERE top_values_rank <= 3GROUP BY time_period, time_period_utc, top_values.grouping_level_1top_values.grouping_level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif 
data_grouping_level.source == 'column_value' -%} + {{ indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + MAX(1 + NULL) AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ('USD', 'GBP', 'EUR') THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX(3) AS expected_value, + top_values.time_period, + top_values.time_period_utc, + top_values.grouping_level_1, + top_values.grouping_level_2 + FROM + ( + SELECT + top_col_values.top_value as top_value, + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + RANK() OVER(PARTITION BY top_col_values.time_period, top_col_values.grouping_level_1, top_col_values.grouping_level_2 + ORDER BY top_col_values.total_values DESC) as top_values_rank, top_col_values.grouping_level_1, top_col_values.grouping_level_2 + FROM + ( + SELECT + analyzed_table."target_column" AS top_value, + COUNT(*) AS total_values, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM + ""."" AS analyzed_table + WHERE (analyzed_table."target_column" IS NOT NULL) + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc, top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= 3 + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/accepted_values/number-found-in-set-percent.md b/docs/checks/column/accepted_values/number-found-in-set-percent.md index f844f39ba1..586ad993df 100644 --- a/docs/checks/column/accepted_values/number-found-in-set-percent.md +++ b/docs/checks/column/accepted_values/number-found-in-set-percent.md @@ -1049,6 +1049,60 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. 
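The template could return NULL when no expected values are configured, or fall back to a constant percentage; MAX(0.0) below takes the second route so that actual_value is always numeric.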
What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. -#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2134,6 +2188,62 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. -#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3245,6 +3355,60 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. 
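One option is to return NULL for a missing value set; the other, used here, is the constant MAX(0.0), which keeps rule evaluation simple.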
What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. -#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4331,6 +4495,62 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. -#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5442,6 +5662,60 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. 
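Returning NULL would flag the missing configuration, while the constant MAX(0.0) used below always yields a numeric result.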
What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. -#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6529,9 +6803,65 @@ Expand the *Configure with data grouping* section to see additional examples for ``` - ??? example "Trino" + ??? example "Teradata" - === "Sensor template for Trino" + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. -#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` + ??? example "Trino" + + === "Sensor template for Trino" ```sql+jinja {% import '/dialects/trino.sql.jinja2' as lib with context -%} @@ -7724,6 +8054,64 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. -#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8867,6 +9255,64 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. 
-#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -10067,6 +10513,64 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. -#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -11210,6 +11714,64 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. -#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN (2, 3) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/accepted_values/text-found-in-set-percent.md b/docs/checks/column/accepted_values/text-found-in-set-percent.md index 81a5314867..4eac5472b3 100644 --- a/docs/checks/column/accepted_values/text-found-in-set-percent.md +++ b/docs/checks/column/accepted_values/text-found-in-set-percent.md @@ -1127,6 +1127,64 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN ('USD', 'GBP', 'EUR') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2294,6 +2352,66 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN ('USD', 'GBP', 'EUR') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? 
example "Trino" @@ -3487,6 +3605,64 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN ('USD', 'GBP', 'EUR') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4655,6 +4831,66 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN ('USD', 'GBP', 'EUR') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5848,6 +6084,64 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN ('USD', 'GBP', 'EUR') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -6952,12 +7246,76 @@ Expand the *Configure with data grouping* section to see additional examples for GROUP BY grouping_level_1, grouping_level_2 ORDER BY grouping_level_1, grouping_level_2 ``` - ??? example "SQL Server" + ??? example "SQL Server" + + === "Sensor template for SQL Server" + ```sql+jinja + {% import '/dialects/sqlserver.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for SQL Server" + ```sql + SELECT + CASE + WHEN COUNT_BIG(analyzed_table.[target_column]) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table.[target_column] IN ('USD', 'GBP', 'EUR') + THEN 1 + ELSE 0 + END + ) / COUNT_BIG(analyzed_table.[target_column]) + END AS actual_value, + analyzed_table.[country] AS grouping_level_1, + analyzed_table.[state] AS grouping_level_2 + FROM [your_sql_server_database].[].[] AS analyzed_table + GROUP BY analyzed_table.[country], analyzed_table.[state] + ORDER BY level_1, level_2 + , + + + + ``` + ??? 
example "Teradata" - === "Sensor template for SQL Server" + === "Sensor template for Teradata" ```sql+jinja - {% import '/dialects/sqlserver.sql.jinja2' as lib with context -%} - + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} {%- macro extract_in_list(values_list) -%} {%- for i in values_list -%} {%- if not loop.last -%} @@ -6973,14 +7331,14 @@ Expand the *Configure with data grouping* section to see additional examples for MAX(0.0) {%- else -%} CASE - WHEN COUNT_BIG({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 ELSE 100.0 * SUM( CASE WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) THEN 1 ELSE 0 END - ) / COUNT_BIG({{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) END {%- endif -%} {% endmacro -%} @@ -6994,28 +7352,24 @@ Expand the *Configure with data grouping* section to see additional examples for {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` - === "Rendered SQL for SQL Server" + === "Rendered SQL for Teradata" ```sql SELECT CASE - WHEN COUNT_BIG(analyzed_table.[target_column]) = 0 THEN 100.0 + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 ELSE 100.0 * SUM( CASE - WHEN analyzed_table.[target_column] IN ('USD', 'GBP', 'EUR') + WHEN analyzed_table."target_column" IN ('USD', 'GBP', 'EUR') THEN 1 ELSE 0 END - ) / COUNT_BIG(analyzed_table.[target_column]) + ) / COUNT(analyzed_table."target_column") END AS actual_value, - analyzed_table.[country] AS grouping_level_1, - analyzed_table.[state] AS grouping_level_2 - FROM [your_sql_server_database].[].[] AS analyzed_table - GROUP BY analyzed_table.[country], analyzed_table.[state] - ORDER BY level_1, level_2 - , - - - + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -8294,6 +8648,68 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN ('USD', 'GBP', 'EUR') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9519,6 +9935,68 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN ('USD', 'GBP', 'EUR') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -10801,6 +11279,68 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN ('USD', 'GBP', 'EUR') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -12026,6 +12566,68 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IN ('USD', 'GBP', 'EUR') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/accepted_values/text-valid-country-code-percent.md b/docs/checks/column/accepted_values/text-valid-country-code-percent.md index ff6bffb259..0d03b0e753 100644 --- a/docs/checks/column/accepted_values/text-valid-country-code-percent.md +++ b/docs/checks/column/accepted_values/text-valid-country-code-percent.md @@ -814,6 +814,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 
'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1653,6 +1693,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 
'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2518,6 +2600,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( 
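+ -- each row contributes 1 when its (upper-cased) value appears in the accepted list, 0 otherwise; NULL values fall through to the ELSE branch and contribute 0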
+ CASE + WHEN UPPER(analyzed_table."target_column") IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3358,6 +3480,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 
'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4223,6 +4387,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 
'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5063,6 +5267,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( 
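+ -- the division below is safe: the outer CASE already returned 100.0 when COUNT(...) = 0, i.e. for an empty or all-NULL column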
+ CASE + WHEN UPPER(analyzed_table."target_column") IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6013,6 +6259,50 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 
'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6910,6 +7200,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 
'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7864,6 +8198,50 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END 
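+ -- the SUM of matches divided by COUNT of non-NULL values is the matching fraction; the 100.0 factor above scales it to a percent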
+ ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8761,6 +9139,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 
'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/accepted_values/text-valid-currency-code-percent.md b/docs/checks/column/accepted_values/text-valid-currency-code-percent.md index 8d350e2122..af01d75e73 100644 --- a/docs/checks/column/accepted_values/text-valid-currency-code-percent.md +++ b/docs/checks/column/accepted_values/text-valid-currency-code-percent.md @@ -821,6 +821,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 
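+ -- note: the accepted values are ISO 4217 currency codes, a few unofficial codes found in practice (e.g. 'TVD', 'IMP'), and common currency symbols such as '$' and '€'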
'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1669,6 +1709,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 
'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2543,6 +2625,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 
'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3392,6 +3514,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 
'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4266,6 +4430,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 
'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5115,6 +5319,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 
'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6074,6 +6320,50 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 
'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6980,6 +7270,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 
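+ -- note: the outer CASE above returns 100.0 when COUNT("target_column") = 0,
+ -- guarding the percentage division against partitions with no non-null values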
'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7943,6 +8277,50 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER(analyzed_table."target_column") IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 
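+ -- note: time_period below is truncated to the first day of the month with
+ -- TRUNC(CAST(... AS DATE), 'MM'), producing one measure per monthly partition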
'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8849,6 +9227,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN 
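+ -- grouping_level_1 and grouping_level_2 below come from the configured
+ -- data-grouping columns ("country" and "state" in this example)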
UPPER(analyzed_table."target_column") IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/accuracy/total-average-match-percent.md b/docs/checks/column/accuracy/total-average-match-percent.md index e5e2ae7fe9..62bbed6bf4 100644 --- a/docs/checks/column/accuracy/total-average-match-percent.md +++ b/docs/checks/column/accuracy/total-average-match-percent.md @@ -686,6 +686,41 @@ spec: AVG(analyzed_table.[target_column] * 1.0) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + AVG(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + AVG(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1400,6 +1435,41 @@ spec: AVG(analyzed_table.[target_column] * 1.0) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + AVG(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + AVG(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2114,6 +2184,41 @@ spec: AVG(analyzed_table.[target_column] * 1.0) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + AVG(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + AVG(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/accuracy/total-max-match-percent.md b/docs/checks/column/accuracy/total-max-match-percent.md index 84bb274772..33b0b897c5 100644 --- a/docs/checks/column/accuracy/total-max-match-percent.md +++ b/docs/checks/column/accuracy/total-max-match-percent.md @@ -679,6 +679,41 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + MAX(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + MAX(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1386,6 +1421,41 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + MAX(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + MAX(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2093,6 +2163,41 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + MAX(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + MAX(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/accuracy/total-min-match-percent.md b/docs/checks/column/accuracy/total-min-match-percent.md index 7a0779f946..4b57b73d87 100644 --- a/docs/checks/column/accuracy/total-min-match-percent.md +++ b/docs/checks/column/accuracy/total-min-match-percent.md @@ -683,6 +683,41 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + MIN(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + MIN(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1394,6 +1429,41 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + MIN(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + MIN(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2105,6 +2175,41 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + MIN(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + MIN(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/accuracy/total-not-null-count-match-percent.md b/docs/checks/column/accuracy/total-not-null-count-match-percent.md index 77399ab513..0c7e658b4d 100644 --- a/docs/checks/column/accuracy/total-not-null-count-match-percent.md +++ b/docs/checks/column/accuracy/total-not-null-count-match-percent.md @@ -683,6 +683,41 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + COUNT(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + COUNT({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + COUNT(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + COUNT(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1394,6 +1429,41 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + COUNT(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + COUNT({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + COUNT(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + COUNT(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2105,6 +2175,41 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + COUNT(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + COUNT({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + COUNT(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + COUNT(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/accuracy/total-sum-match-percent.md b/docs/checks/column/accuracy/total-sum-match-percent.md index 34ac2df178..8421c36494 100644 --- a/docs/checks/column/accuracy/total-sum-match-percent.md +++ b/docs/checks/column/accuracy/total-sum-match-percent.md @@ -693,6 +693,41 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + SUM(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + SUM(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1414,6 +1449,41 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + SUM(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + SUM(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2135,6 +2205,41 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + SUM(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + SUM(referenced_table."customer_id") + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/anomaly/max-anomaly.md b/docs/checks/column/anomaly/max-anomaly.md index cb6eda9cc9..22528e4bbf 100644 --- a/docs/checks/column/anomaly/max-anomaly.md +++ b/docs/checks/column/anomaly/max-anomaly.md @@ -534,6 +534,28 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1073,6 +1095,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1638,6 +1684,28 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2178,6 +2246,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2828,6 +2920,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3425,6 +3543,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/mean-anomaly.md b/docs/checks/column/anomaly/mean-anomaly.md index 95aecab475..f5dbc4f58d 100644 --- a/docs/checks/column/anomaly/mean-anomaly.md +++ b/docs/checks/column/anomaly/mean-anomaly.md @@ -533,6 +533,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1072,6 +1094,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1637,6 +1683,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2177,6 +2245,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2827,6 +2919,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3424,6 +3542,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/mean-change-1-day.md b/docs/checks/column/anomaly/mean-change-1-day.md index aaecd1da7d..b363cf6b38 100644 --- a/docs/checks/column/anomaly/mean-change-1-day.md +++ b/docs/checks/column/anomaly/mean-change-1-day.md @@ -534,6 +534,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1076,6 +1098,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1644,6 +1690,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2187,6 +2255,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2840,6 +2932,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3440,6 +3558,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/mean-change-30-days.md b/docs/checks/column/anomaly/mean-change-30-days.md index 2449d3774e..f0d8cf459a 100644 --- a/docs/checks/column/anomaly/mean-change-30-days.md +++ b/docs/checks/column/anomaly/mean-change-30-days.md @@ -535,6 +535,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1077,6 +1099,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1645,6 +1691,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2188,6 +2256,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2841,6 +2933,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3441,6 +3559,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/mean-change-7-days.md b/docs/checks/column/anomaly/mean-change-7-days.md index b55a41a15c..80ee024d1f 100644 --- a/docs/checks/column/anomaly/mean-change-7-days.md +++ b/docs/checks/column/anomaly/mean-change-7-days.md @@ -535,6 +535,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1077,6 +1099,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1645,6 +1691,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2188,6 +2256,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2841,6 +2933,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3441,6 +3559,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/mean-change.md b/docs/checks/column/anomaly/mean-change.md index 18629d2199..b9af9a40f6 100644 --- a/docs/checks/column/anomaly/mean-change.md +++ b/docs/checks/column/anomaly/mean-change.md @@ -531,6 +531,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1070,6 +1092,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1635,6 +1681,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2175,6 +2243,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2825,6 +2917,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3422,6 +3540,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/median-anomaly.md b/docs/checks/column/anomaly/median-anomaly.md index c1eb6c6736..ab18397abc 100644 --- a/docs/checks/column/anomaly/median-anomaly.md +++ b/docs/checks/column/anomaly/median-anomaly.md @@ -801,6 +801,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1639,6 +1664,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2506,6 +2558,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3345,6 +3422,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -4306,6 +4410,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5228,6 +5361,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/anomaly/median-change-1-day.md b/docs/checks/column/anomaly/median-change-1-day.md index 3c26c973a0..d9a0f53abc 100644 --- a/docs/checks/column/anomaly/median-change-1-day.md +++ b/docs/checks/column/anomaly/median-change-1-day.md @@ -802,6 +802,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1643,6 +1668,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2513,6 +2565,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3355,6 +3432,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4319,6 +4423,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5244,6 +5377,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/anomaly/median-change-30-days.md b/docs/checks/column/anomaly/median-change-30-days.md index 09810494eb..9dc044a6a3 100644 --- a/docs/checks/column/anomaly/median-change-30-days.md +++ b/docs/checks/column/anomaly/median-change-30-days.md @@ -803,6 +803,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1644,6 +1669,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2514,6 +2566,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3356,6 +3433,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4320,6 +4424,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5245,6 +5378,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/anomaly/median-change-7-days.md b/docs/checks/column/anomaly/median-change-7-days.md index 34b9d7ad78..f990c64b6f 100644 --- a/docs/checks/column/anomaly/median-change-7-days.md +++ b/docs/checks/column/anomaly/median-change-7-days.md @@ -803,6 +803,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1644,6 +1669,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2514,6 +2566,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3356,6 +3433,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4320,6 +4424,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5245,6 +5378,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/anomaly/median-change.md b/docs/checks/column/anomaly/median-change.md index f8e9849f8c..6a29e02c60 100644 --- a/docs/checks/column/anomaly/median-change.md +++ b/docs/checks/column/anomaly/median-change.md @@ -799,6 +799,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1637,6 +1662,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2504,6 +2556,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3343,6 +3420,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4304,6 +4408,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5226,6 +5359,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/anomaly/min-anomaly.md b/docs/checks/column/anomaly/min-anomaly.md index c4ee24ec35..3cdbff794b 100644 --- a/docs/checks/column/anomaly/min-anomaly.md +++ b/docs/checks/column/anomaly/min-anomaly.md @@ -534,6 +534,28 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1073,6 +1095,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1638,6 +1684,28 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2178,6 +2246,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2828,6 +2920,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3425,6 +3543,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/sum-anomaly.md b/docs/checks/column/anomaly/sum-anomaly.md index 657eedf13a..4c79027fc0 100644 --- a/docs/checks/column/anomaly/sum-anomaly.md +++ b/docs/checks/column/anomaly/sum-anomaly.md @@ -533,6 +533,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1072,6 +1094,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1637,6 +1683,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2177,6 +2245,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2827,6 +2919,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3424,6 +3542,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/sum-change-1-day.md b/docs/checks/column/anomaly/sum-change-1-day.md index 4d01cea789..be179ea533 100644 --- a/docs/checks/column/anomaly/sum-change-1-day.md +++ b/docs/checks/column/anomaly/sum-change-1-day.md @@ -534,6 +534,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1076,6 +1098,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1644,6 +1690,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2187,6 +2255,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2840,6 +2932,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3440,6 +3558,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/sum-change-30-days.md b/docs/checks/column/anomaly/sum-change-30-days.md index 5335e76613..804b3d9257 100644 --- a/docs/checks/column/anomaly/sum-change-30-days.md +++ b/docs/checks/column/anomaly/sum-change-30-days.md @@ -535,6 +535,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1077,6 +1099,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1645,6 +1691,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2188,6 +2256,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2841,6 +2933,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3441,6 +3559,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/sum-change-7-days.md b/docs/checks/column/anomaly/sum-change-7-days.md index 8c96d9b3f4..b4645752cc 100644 --- a/docs/checks/column/anomaly/sum-change-7-days.md +++ b/docs/checks/column/anomaly/sum-change-7-days.md @@ -535,6 +535,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1077,6 +1099,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1645,6 +1691,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2188,6 +2256,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2841,6 +2933,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3441,6 +3559,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/anomaly/sum-change.md b/docs/checks/column/anomaly/sum-change.md index 8d977af2b7..91b44436e8 100644 --- a/docs/checks/column/anomaly/sum-change.md +++ b/docs/checks/column/anomaly/sum-change.md @@ -531,6 +531,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1070,6 +1092,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1635,6 +1681,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2175,6 +2243,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2825,6 +2917,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3422,6 +3540,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/bool/false-percent.md b/docs/checks/column/bool/false-percent.md index b67365ca38..d5ecf5c185 100644 --- a/docs/checks/column/bool/false-percent.md +++ b/docs/checks/column/bool/false-percent.md @@ -809,6 +809,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1644,6 +1684,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2505,6 +2587,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3341,6 +3463,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4202,6 +4366,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5038,6 +5242,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5984,6 +6230,50 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6877,6 +7167,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7827,6 +8161,50 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8720,6 +9098,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/bool/true-percent.md b/docs/checks/column/bool/true-percent.md index 9ab001ff35..ba07bea776 100644 --- a/docs/checks/column/bool/true-percent.md +++ b/docs/checks/column/bool/true-percent.md @@ -808,6 +808,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 1 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1642,6 +1682,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 1 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2502,6 +2584,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 1 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3337,6 +3459,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 1 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4197,6 +4361,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 1 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5032,6 +5236,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 1 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5977,6 +6223,50 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 1 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6869,6 +7159,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 1 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7818,6 +8152,50 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 1 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8710,6 +9088,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" = 1 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/comparisons/max-match.md b/docs/checks/column/comparisons/max-match.md index d11b89f8d2..e757184016 100644 --- a/docs/checks/column/comparisons/max-match.md +++ b/docs/checks/column/comparisons/max-match.md @@ -553,6 +553,28 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1131,6 +1153,28 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1709,6 +1753,28 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2372,6 +2438,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3042,6 +3134,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/comparisons/mean-match.md b/docs/checks/column/comparisons/mean-match.md index 5e7a21330c..3a0a623446 100644 --- a/docs/checks/column/comparisons/mean-match.md +++ b/docs/checks/column/comparisons/mean-match.md @@ -553,6 +553,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1131,6 +1153,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1709,6 +1753,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2372,6 +2438,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -3042,6 +3134,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/comparisons/min-match.md b/docs/checks/column/comparisons/min-match.md index f5d801119b..4559cdcbda 100644 --- a/docs/checks/column/comparisons/min-match.md +++ b/docs/checks/column/comparisons/min-match.md @@ -553,6 +553,28 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1131,6 +1153,28 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1709,6 +1753,28 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2372,6 +2438,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3042,6 +3134,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/comparisons/not-null-count-match.md b/docs/checks/column/comparisons/not-null-count-match.md index ffb2b72efa..2d90dd3626 100644 --- a/docs/checks/column/comparisons/not-null-count-match.md +++ b/docs/checks/column/comparisons/not-null-count-match.md @@ -571,6 +571,30 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1167,6 +1191,30 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1763,6 +1811,30 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2444,6 +2516,34 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -3132,6 +3232,34 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/comparisons/null-count-match.md b/docs/checks/column/comparisons/null-count-match.md index 9be6802126..ea83a44ccb 100644 --- a/docs/checks/column/comparisons/null-count-match.md +++ b/docs/checks/column/comparisons/null-count-match.md @@ -713,6 +713,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1461,6 +1493,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2209,6 +2273,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3042,6 +3138,42 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3882,6 +4014,42 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/comparisons/sum-match.md b/docs/checks/column/comparisons/sum-match.md index edf776207a..8e6b0b0a4b 100644 --- a/docs/checks/column/comparisons/sum-match.md +++ b/docs/checks/column/comparisons/sum-match.md @@ -553,6 +553,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1131,6 +1153,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1709,6 +1753,28 @@ spec: SUM(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2372,6 +2438,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -3042,6 +3134,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM(analyzed_table."target_column") AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/conversions/text-parsable-to-boolean-percent.md b/docs/checks/column/conversions/text-parsable-to-boolean-percent.md index 92791559d4..75bbe5cde4 100644 --- a/docs/checks/column/conversions/text-parsable-to-boolean-percent.md +++ b/docs/checks/column/conversions/text-parsable-to-boolean-percent.md @@ -815,6 +815,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1654,6 +1694,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2519,6 +2601,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3359,6 +3481,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4224,6 +4388,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5064,6 +5268,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6014,6 +6260,50 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6911,6 +7201,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7865,6 +8199,50 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8762,6 +9140,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/conversions/text-parsable-to-date-percent.md b/docs/checks/column/conversions/text-parsable-to-date-percent.md index 85eb12164a..260c44640c 100644 --- a/docs/checks/column/conversions/text-parsable-to-date-percent.md +++ b/docs/checks/column/conversions/text-parsable-to-date-percent.md @@ -1103,6 +1103,67 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2246,6 +2307,69 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
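example "Illustrative sketch: REGEXP_SUBSTR as a match test"

    The date-parsing sensor above tests `REGEXP_SUBSTR(...) IS NOT NULL`: `REGEXP_SUBSTR` returns the matching substring, or NULL when the pattern does not match, so the `IS NOT NULL` check acts as a boolean regex test. A one-expression sketch with a hypothetical literal:

    ```sql
    -- 1 when the literal matches an ISO-style date pattern, 0 otherwise.
    SELECT
        CASE
            WHEN REGEXP_SUBSTR('2024-10-27', '^\d{4}-\d{2}-\d{2}$') IS NOT NULL
            THEN 1 ELSE 0
        END AS is_iso_date;
    ```

??? 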
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3415,6 +3539,67 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4559,6 +4744,69 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
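note "How the date-format patterns above work" + + The nine `REGEXP_SUBSTR` branches jointly accept the common `DD/MM/YYYY`, `MM/DD/YYYY`, and `YYYY/MM/DD` layouts with `/`, `-`, or `.` separators (month names such as `Feb` or `Oct` are also accepted); dedicated branches admit day 31 only for 31-day months and 29 Feb only in leap years. The empty quoted identifiers in `FROM ""."" ` mark where `{{ lib.render_target_table() }}` emits the monitored schema and table. A minimal sketch for probing a single branch, assuming only a Teradata session (the sample literals are illustrative): + + ```sql + -- '29.02.2024' satisfies the leap-day branch; '29.02.2023' fails every branch. + SELECT CASE + WHEN REGEXP_SUBSTR('29.02.2024', '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + THEN 1 ELSE 0 + END AS leap_day_ok; + ``` + + The same probe works for any branch; swap in the literal and pattern of interest. + + ??? 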
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5728,6 +5976,67 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
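note "Data grouping columns in these examples" + + The `grouping_level_1` and `grouping_level_2` projections are emitted by `lib.render_data_grouping_projections('analyzed_table')`; the `"country"` and `"state"` columns are the sample grouping dimensions used throughout these docs, not fixed names. A minimal sketch of the resulting shape, assuming a Teradata session and a hypothetical `some_schema.some_table`: + + ```sql + SELECT t."country" AS grouping_level_1, + t."state" AS grouping_level_2, + COUNT(*) AS row_cnt + FROM some_schema.some_table AS t + GROUP BY 1, 2 + ORDER BY 1, 2; + ``` + + ??? 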
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6872,6 +7181,69 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -8126,6 +8498,71 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9327,6 +9764,71 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -10585,6 +11087,71 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -11786,6 +12353,71 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
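note "Daily and monthly partition periods" + + The partitioned variants above derive `time_period` from the configured date column: daily checks use `CAST(analyzed_table."date_column" AS DATE)`, while monthly checks truncate to the first day of the month with `TRUNC(CAST(... AS DATE), 'MM')`; `time_period_utc` is the same period cast to `TIMESTAMP`. A minimal sketch of the two derivations, assuming a Teradata session (the date literal stands in for `"date_column"`): + + ```sql + -- daily_period renders 2024-03-15; monthly_period renders 2024-03-01 + SELECT CAST(DATE '2024-03-15' AS DATE) AS daily_period, + TRUNC(DATE '2024-03-15', 'MM') AS monthly_period; + ``` + + ??? 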
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/conversions/text-parsable-to-float-percent.md b/docs/checks/column/conversions/text-parsable-to-float-percent.md index 79839d5cd5..9601ad6009 100644 --- a/docs/checks/column/conversions/text-parsable-to-float-percent.md +++ b/docs/checks/column/conversions/text-parsable-to-float-percent.md @@ -695,6 +695,40 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT(analyzed_table."target_column") + END AS actual_value + + + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1414,6 +1448,45 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT(analyzed_table."target_column") + END AS actual_value + , + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + + FROM ""."" AS analyzed_table + + + GROUP BY grouping_level_1, grouping_level_2 + + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2153,6 +2226,40 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT(analyzed_table."target_column") + END AS actual_value + + + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2873,6 +2980,45 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT(analyzed_table."target_column") + END AS actual_value + , + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + + FROM ""."" AS analyzed_table + + + GROUP BY grouping_level_1, grouping_level_2 + + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3612,6 +3758,40 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT(analyzed_table."target_column") + END AS actual_value + + + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4332,6 +4512,45 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT(analyzed_table."target_column") + END AS actual_value + , + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + + FROM ""."" AS analyzed_table + + + GROUP BY grouping_level_1, grouping_level_2 + + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5162,6 +5381,47 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT(analyzed_table."target_column") + END AS actual_value + + , + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + + + GROUP BY time_period, time_period_utc + + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5939,6 +6199,47 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT(analyzed_table."target_column") + END AS actual_value + , + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + , + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + + + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6773,6 +7074,47 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT(analyzed_table."target_column") + END AS actual_value + + , + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + + + GROUP BY time_period, time_period_utc + + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7550,6 +7892,47 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT(analyzed_table."target_column") + END AS actual_value + , + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + , + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + + + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" diff --git a/docs/checks/column/conversions/text-parsable-to-integer-percent.md b/docs/checks/column/conversions/text-parsable-to-integer-percent.md index ea4d45058b..4e44aff7e3 100644 --- a/docs/checks/column/conversions/text-parsable-to-integer-percent.md +++ b/docs/checks/column/conversions/text-parsable-to-integer-percent.md @@ -692,6 +692,38 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST(analyzed_table."target_column" AS INTEGER) + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1402,6 +1434,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST(analyzed_table."target_column" AS INTEGER) + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2138,6 +2204,38 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST(analyzed_table."target_column" AS INTEGER) + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2849,6 +2947,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST(analyzed_table."target_column" AS INTEGER) + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3585,6 +3717,38 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST(analyzed_table."target_column" AS INTEGER) + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -4296,6 +4460,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST(analyzed_table."target_column" AS INTEGER) + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5117,6 +5315,42 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST(analyzed_table."target_column" AS INTEGER) + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5885,6 +6119,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST(analyzed_table."target_column" AS INTEGER) + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6710,6 +6980,42 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST(analyzed_table."target_column" AS INTEGER) + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7478,6 +7784,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST(analyzed_table."target_column" AS INTEGER) + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/custom_sql/import-custom-result-on-column.md b/docs/checks/column/custom_sql/import-custom-result-on-column.md index 530470de3a..6f86286ed8 100644 --- a/docs/checks/column/custom_sql/import-custom-result-on-column.md +++ b/docs/checks/column/custom_sql/import-custom-result-on-column.md @@ -470,6 +470,28 @@ spec: ``` === "Rendered SQL for SQL Server" + ```sql + SELECT + logs.my_actual_value as actual_value, + logs.my_expected_value as expected_value, + logs.error_severity as severity + FROM custom_data_quality_results as logs + WHERE logs.analyzed_schema_name = '' AND + logs.analyzed_table_name = '' AND + logs.analyzed_column_name = 'target_column' + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {{ parameters.sql_query | replace('{table_name}', target_table.table_name) + | replace('{schema_name}', target_table.schema_name) + | replace('{column_name}', column_name) }} + ``` + === "Rendered SQL for Teradata" + ```sql SELECT logs.my_actual_value as actual_value, @@ -962,6 +984,28 @@ spec: ``` === "Rendered SQL for SQL Server" + ```sql + SELECT + logs.my_actual_value as actual_value, + logs.my_expected_value as expected_value, + logs.error_severity as severity + FROM custom_data_quality_results as logs + WHERE logs.analyzed_schema_name = '' AND + logs.analyzed_table_name = '' AND + logs.analyzed_column_name = 'target_column' + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {{ parameters.sql_query | replace('{table_name}', target_table.table_name) + | replace('{schema_name}', target_table.schema_name) + | replace('{column_name}', column_name) }} + ``` + === "Rendered SQL for Teradata" + ```sql SELECT logs.my_actual_value as actual_value, @@ -1454,6 +1498,28 @@ spec: ``` === "Rendered SQL for SQL Server" + ```sql + SELECT + logs.my_actual_value as actual_value, + logs.my_expected_value as expected_value, + logs.error_severity as severity + FROM custom_data_quality_results as logs + WHERE logs.analyzed_schema_name = '' AND + logs.analyzed_table_name = '' AND + logs.analyzed_column_name = 'target_column' + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {{ parameters.sql_query | replace('{table_name}', target_table.table_name) + | replace('{schema_name}', target_table.schema_name) + | replace('{column_name}', column_name) }} + ``` + === "Rendered SQL for Teradata" + ```sql SELECT logs.my_actual_value as actual_value, diff --git a/docs/checks/column/custom_sql/sql-aggregate-expression-on-column.md b/docs/checks/column/custom_sql/sql-aggregate-expression-on-column.md index d92ea37ef8..c871ece9bd 100644 --- a/docs/checks/column/custom_sql/sql-aggregate-expression-on-column.md +++ b/docs/checks/column/custom_sql/sql-aggregate-expression-on-column.md @@ -546,6 +546,29 @@ spec: (MAX(analyzed_table.[target_column])) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (MAX(analyzed_table."target_column")) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1101,6 +1124,31 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + (MAX(analyzed_table."target_column")) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1682,6 +1730,29 @@ spec: (MAX(analyzed_table.[target_column])) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (MAX(analyzed_table."target_column")) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2238,6 +2309,31 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + (MAX(analyzed_table."target_column")) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2819,6 +2915,29 @@ spec: (MAX(analyzed_table.[target_column])) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (MAX(analyzed_table."target_column")) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3375,6 +3494,31 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + (MAX(analyzed_table."target_column")) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4041,6 +4185,33 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (MAX(analyzed_table."target_column")) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4654,6 +4825,33 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + (MAX(analyzed_table."target_column")) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5324,6 +5522,33 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (MAX(analyzed_table."target_column")) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5937,6 +6162,33 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + (MAX(analyzed_table."target_column")) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/custom_sql/sql-condition-failed-on-column.md b/docs/checks/column/custom_sql/sql-condition-failed-on-column.md index 590f967931..16e6ea4ac6 100644 --- a/docs/checks/column/custom_sql/sql-condition-failed-on-column.md +++ b/docs/checks/column/custom_sql/sql-condition-failed-on-column.md @@ -776,6 +776,43 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND NOT (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1572,6 +1609,45 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND NOT (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2394,6 +2470,43 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND NOT (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3191,6 +3304,45 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND NOT (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4013,6 +4165,43 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND NOT (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4810,6 +4999,45 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND NOT (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5717,6 +5945,47 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND NOT (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6571,6 +6840,47 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND NOT (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7482,6 +7792,47 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND NOT (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8336,6 +8687,47 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND NOT (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/custom_sql/sql-condition-passed-percent-on-column.md b/docs/checks/column/custom_sql/sql-condition-passed-percent-on-column.md index 960ca86882..145b88fa8f 100644 --- a/docs/checks/column/custom_sql/sql-condition-passed-percent-on-column.md +++ b/docs/checks/column/custom_sql/sql-condition-passed-percent-on-column.md @@ -866,6 +866,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1760,6 +1803,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2680,6 +2768,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3575,6 +3706,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4495,6 +4671,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5390,6 +5609,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6395,6 +6659,53 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7347,6 +7658,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8356,6 +8714,53 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -9308,6 +9713,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND (analyzed_table."target_column" + col_tax = col_total_price_with_tax) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/datatype/detected-datatype-in-text-changed.md b/docs/checks/column/datatype/detected-datatype-in-text-changed.md index 3816aa5dfe..d7e65a4030 100644 --- a/docs/checks/column/datatype/detected-datatype-in-text-changed.md +++ b/docs/checks/column/datatype/detected-datatype-in-text-changed.md @@ -3226,6 +3226,177 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN 
COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6620,19 +6791,19 @@ Expand the *Configure with data grouping* section to see additional examples for ``` - ??? example "Trino" + ??? example "Teradata" - === "Sensor template for Trino" + === "Sensor template for Teradata" ```sql+jinja - {% import '/dialects/trino.sql.jinja2' as lib with context -%} + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} SELECT CASE - WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -6641,61 +6812,60 @@ Expand the *Configure with data grouping* section to see additional examples for WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 2 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 3 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = - SUM( - CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') - THEN 1 - ELSE 0 - END - ) - THEN 5 + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -6703,28 +6873,22 @@ Expand the *Configure with data grouping* section to see additional examples for THEN 7 ELSE 8 END AS actual_value - {{- lib.render_data_grouping_projections_reference('analyzed_table') }} - {{- lib.render_time_dimension_projection_reference('analyzed_table') }} - FROM ( - SELECT - original_table.* - {{- lib.render_data_grouping_projections('original_table') }} - {{- lib.render_time_dimension_projection('original_table') }} - FROM {{ lib.render_target_table() }} original_table - ) analyzed_table + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause() -}} {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` - === "Rendered SQL for Trino" + === "Rendered SQL for Teradata" ```sql SELECT CASE - WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -6733,61 +6897,60 @@ Expand the *Configure with data grouping* section to see additional examples for WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 2 WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 3 WHEN COUNT(analyzed_table."target_column") = SUM( CASE 
- WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 WHEN COUNT(analyzed_table."target_column") = - SUM( - CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') - THEN 1 - ELSE 0 - END - ) - THEN 5 + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 WHEN COUNT(analyzed_table."target_column") = SUM( CASE WHEN analyzed_table."target_column" IS NULL OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(TRY_CAST(analyzed_table."target_column" AS VARCHAR)) <> '' + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -6795,29 +6958,210 @@ Expand the *Configure with data grouping* section to see additional examples for THEN 7 ELSE 8 END AS actual_value, - - analyzed_table.grouping_level_1, - - analyzed_table.grouping_level_2 - - FROM ( - SELECT - original_table.*, - original_table."country" AS grouping_level_1, - original_table."state" AS grouping_level_2 - FROM "your_trino_catalog".""."" original_table - ) analyzed_table + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table GROUP BY grouping_level_1, grouping_level_2 ORDER BY grouping_level_1, grouping_level_2 ``` - -___ - - -## daily detected datatype in text changed - + ??? example "Trino" -**Check description** + === "Sensor template for Trino" + ```sql+jinja + {% import '/dialects/trino.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 0 + WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections_reference('analyzed_table') }} + {{- lib.render_time_dimension_projection_reference('analyzed_table') }} + FROM ( + SELECT + original_table.* + {{- lib.render_data_grouping_projections('original_table') }} + {{- lib.render_time_dimension_projection('original_table') }} + FROM {{ lib.render_target_table() }} original_table + ) analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Trino" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 0 + WHEN TRIM(TRY_CAST(analyzed_table."target_column" AS VARCHAR)) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + + analyzed_table.grouping_level_1, + + analyzed_table.grouping_level_2 + + FROM ( + SELECT + original_table.*, + original_table."country" AS grouping_level_1, + original_table."state" AS grouping_level_2 + FROM "your_trino_catalog".""."" original_table + ) analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` + +___ + + +## daily detected datatype in text changed + + +**Check description** Detects that the data type of texts stored in a text column has changed since the last verification. The sensor returns the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. Stores the most recent captured value for each day when the data quality check was evaluated. 
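To make the returned codes concrete, here is a minimal standalone sketch of the classification idea. The table `demo_values` and column `txt` are hypothetical stand-ins rather than part of DQOps, and only the integer test (code 1) with a plain-string fallback (code 7) is reproduced, reusing the same `REGEXP_SUBSTR` predicate as the Teradata sensor above; the empty-table guard (`WHEN COUNT(...) = 0 THEN NULL`) from the real template is omitted for brevity.

```sql
-- Hypothetical demo, not a DQOps sensor template.
-- Returns 1 when every non-null value in "txt" matches the integer
-- pattern '^[-+]?\d+$', otherwise falls back to 7 (plain strings).
SELECT
    CASE
        WHEN COUNT(txt) = SUM(
            CASE
                WHEN REGEXP_SUBSTR(CAST(txt AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL
                    THEN 1
                ELSE 0
            END
        )
            THEN 1
        ELSE 7
    END AS actual_value
FROM demo_values;
```

For a column holding '10', '42' and '-7' the query returns 1; once a load introduces a value such as 'ten', the readout changes to 7. A change between two such daily readouts is what this check raises a data quality issue for.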
@@ -10026,20 +10370,20 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` - ??? example "Trino" + ??? example "Teradata" - === "Sensor template for Trino" + === "Sensor template for Teradata" ```sql+jinja - {% import '/dialects/trino.sql.jinja2' as lib with context -%} + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} SELECT CASE - WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -10048,61 +10392,60 @@ spec: WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 2 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 3 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = - SUM( - CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') - THEN 1 - ELSE 0 - END - ) - THEN 5 + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -10110,29 +10453,23 @@ spec: THEN 7 ELSE 8 END AS actual_value - {{- lib.render_data_grouping_projections_reference('analyzed_table') }} - {{- lib.render_time_dimension_projection_reference('analyzed_table') }} - FROM ( - SELECT - original_table.* - {{- lib.render_data_grouping_projections('original_table') }} - {{- lib.render_time_dimension_projection('original_table') }} - FROM {{ lib.render_target_table() }} original_table - ) analyzed_table + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause() -}} {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` - === "Rendered SQL for Trino" + === "Rendered SQL for Teradata" ```sql SELECT CASE - WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -10141,11 +10478,189 @@ spec: WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + FROM ""."" AS analyzed_table + ``` + ??? example "Trino" + + === "Sensor template for Trino" + + ```sql+jinja + {% import '/dialects/trino.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ 
lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 0 + WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections_reference('analyzed_table') }} + {{- lib.render_time_dimension_projection_reference('analyzed_table') }} + FROM ( + SELECT + original_table.* + {{- lib.render_data_grouping_projections('original_table') }} + {{- lib.render_time_dimension_projection('original_table') }} + FROM {{ lib.render_target_table() }} original_table + ) analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Trino" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN 
REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END ) THEN 2 WHEN COUNT(analyzed_table."target_column") = @@ -13421,19 +13936,19 @@ Expand the *Configure with data grouping* section to see additional examples for ``` - ??? example "Trino" + ??? example "Teradata" - === "Sensor template for Trino" + === "Sensor template for Teradata" ```sql+jinja - {% import '/dialects/trino.sql.jinja2' as lib with context -%} + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} SELECT CASE - WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -13442,61 +13957,60 @@ Expand the *Configure with data grouping* section to see additional examples for WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 2 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 3 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = - SUM( - CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') - THEN 1 - ELSE 0 - END - ) - THEN 5 + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -13504,28 +14018,22 @@ Expand the *Configure with data grouping* section to see additional examples for THEN 7 ELSE 8 END AS actual_value - {{- lib.render_data_grouping_projections_reference('analyzed_table') }} - {{- lib.render_time_dimension_projection_reference('analyzed_table') }} - FROM ( - SELECT - original_table.* - {{- lib.render_data_grouping_projections('original_table') }} - {{- lib.render_time_dimension_projection('original_table') }} - FROM {{ lib.render_target_table() }} original_table - ) analyzed_table + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause() -}} {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` - === "Rendered SQL for Trino" + === "Rendered SQL for Teradata" ```sql SELECT CASE - WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -13534,55 +14042,235 @@ Expand the *Configure with data grouping* section to see additional examples for WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 2 WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 3 WHEN COUNT(analyzed_table."target_column") = SUM( 
CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 WHEN COUNT(analyzed_table."target_column") = - SUM( - CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') - THEN 1 - ELSE 0 - END - ) - THEN 5 + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 WHEN COUNT(analyzed_table."target_column") = SUM( CASE WHEN analyzed_table."target_column" IS NULL OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` + ??? 
example "Trino" + + === "Sensor template for Trino" + ```sql+jinja + {% import '/dialects/trino.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR + REGEXP_LIKE(TRY_CAST({{ 
lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 0 + WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections_reference('analyzed_table') }} + {{- lib.render_time_dimension_projection_reference('analyzed_table') }} + FROM ( + SELECT + original_table.* + {{- lib.render_data_grouping_projections('original_table') }} + {{- lib.render_time_dimension_projection('original_table') }} + FROM {{ lib.render_target_table() }} original_table + ) analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Trino" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR @@ -16696,128 +17384,299 @@ spec: OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 23) IS NOT NULL -- yyyy-mm-dd ) THEN 1 ELSE 0 - END + END + ) + THEN 3 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE + WHEN CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' + AND ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' + ) AND ( + ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + ) OR ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' + ) + ) AND ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' + ) + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE + WHEN TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR 
TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyy-dd-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- mm-dd-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- mm-yyyy-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- dd-mm-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- dd-yyyy-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- mon dd yyyy hh:mm:ss:nnnAM + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- dd mon yyyy hh:mm:ss:nnn + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE + WHEN LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('true', 'false', 'yes', 'no', 'y', 'n', 't', 'f') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE WHEN analyzed_table.[target_column] IS NULL OR + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^0-9]%' OR + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-+0123456789.,]%' OR + LEN(CAST(analyzed_table.[target_column] AS VARCHAR)) <= 10 + AND( + TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 110) IS NOT NULL -- mm-dd-yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 101) IS NOT NULL -- mm/dd/yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 105) IS NOT NULL -- dd-mm-yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 103) IS NOT NULL -- dd/mm/yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 104) IS NOT NULL -- dd.mm.yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 107) IS NOT NULL -- Mon dd, yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 102) IS NOT NULL -- yyyy.mm.dd + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 111) IS NOT NULL -- yyyy/mm/dd + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 23) IS NOT NULL -- yyyy-mm-dd + ) + OR + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' + AND ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' + ) AND ( + ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + ) OR ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' + OR CAST(analyzed_table.[target_column] 
AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' + ) + ) AND ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' + ) + OR + TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyy-dd-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- mm-dd-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- mm-yyyy-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- dd-mm-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- dd-yyyy-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- mon dd yyyy hh:mm:ss:nnnAM + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- dd mon yyyy hh:mm:ss:nnn + OR LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('true', 'false', 'yes', 'no', 'y', 'n', 't', 'f') + THEN 0 + WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + FROM [your_sql_server_database].[].[] AS analyzed_table + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END ) THEN 3 - WHEN 
COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' - AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' - ) AND ( - ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - ) OR ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' - ) - ) AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' - ) + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 - WHEN COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS 
VARCHAR), 25) IS NOT NULL -- yyyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyyy-dd-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- ymm-dd-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- ymm-yyyy-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- ydd-mm-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - THEN 1 + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 ELSE 0 END ) THEN 5 - WHEN COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 - WHEN COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( - CASE WHEN analyzed_table.[target_column] IS NULL OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^0-9]%' OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-+0123456789.,]%' OR - LEN(CAST(analyzed_table.[target_column] AS VARCHAR)) <= 10 - AND( - TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 110) IS NOT NULL -- mm-dd-yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 101) IS NOT NULL -- mm/dd/yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 105) IS NOT NULL -- dd-mm-yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 103) IS NOT NULL -- dd/mm/yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 104) IS NOT NULL -- dd.mm.yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 107) IS NOT NULL -- Mon dd, yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 102) IS NOT NULL -- yyyy.mm.dd - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 111) IS NOT NULL -- yyyy/mm/dd - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 23) IS NOT NULL -- yyyy-mm-dd - ) - OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' - AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' - ) AND ( - ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - ) OR ( - 
CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' - ) - ) AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' - ) - OR - TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyyy-dd-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- ymm-dd-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- ymm-yyyy-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- ydd-mm-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
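-- datetime pattern: a date part plus HH:MM:SS and an optional am/pm suffix (same expression as in the THEN 4 branch)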
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> '' + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -16825,7 +17684,7 @@ spec: THEN 7 ELSE 8 END AS actual_value - FROM [your_sql_server_database].[].[] AS analyzed_table + FROM ""."" AS analyzed_table ``` ??? example "Trino" @@ -20221,6 +21080,179 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN 
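-- each branch compares COUNT("target_column") with the number of rows matching one format; equality means every non-NULL value fits that type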
COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
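-- REGEXP_SUBSTR returns NULL when the pattern does not match, so IS NOT NULL acts as a boolean regex test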
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -23713,6 +24745,181 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), 
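-- ISO-8601-like timestamp: date, optional 'T', time, optional fractional seconds, then an optional GMT/UTC, numeric-offset or 'Z' suffix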
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN 
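-- actual_value codes: 1=integers, 2=floats, 3=dates, 4=datetimes, 5=timestamps, 6=booleans, 7=texts, 8=mixed types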
REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
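-- datetime pattern, as in the THEN 4 branch, reused here for the texts/mixed test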
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -27147,7 +28354,184 @@ Expand the *Configure with data grouping* section to see additional examples for OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn OR LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') THEN 0 - WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> '' + WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + analyzed_table.[country] AS grouping_level_1, + analyzed_table.[state] AS grouping_level_2, + CAST(analyzed_table.[date_column] AS date) AS time_period, + CAST((CAST(analyzed_table.[date_column] AS date)) AS DATETIME) AS time_period_utc + FROM [your_sql_server_database].[].[] AS analyzed_table + GROUP BY analyzed_table.[country], analyzed_table.[state], CAST(analyzed_table.[date_column] AS date), CAST(analyzed_table.[date_column] AS date) + ORDER BY grouping_level_1, grouping_level_2, CAST(analyzed_table.[date_column] AS date) + + + ``` + ???
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN 
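-- THEN 4 branch: true when every value carries both a date and a time component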
COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
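-- timestamp pattern, same expression as in the THEN 5 branch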
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -27155,15 +28539,13 @@ Expand the *Configure with data grouping* section to see additional examples for THEN 7 ELSE 8 END AS actual_value, - analyzed_table.[country] AS grouping_level_1, - analyzed_table.[state] AS grouping_level_2, - CAST(analyzed_table.[date_column] AS date) AS time_period, - CAST((CAST(analyzed_table.[date_column] AS date)) AS DATETIME) AS time_period_utc - FROM [your_sql_server_database].[].[] AS analyzed_table - GROUP BY analyzed_table.[country], analyzed_table.[state], CAST(analyzed_table.[date_column] AS date), CAST(analyzed_table.[date_column] AS date) - ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) - - + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -30554,98 +31936,275 @@ spec: ) THEN 1 ELSE 0 - END + END + ) + THEN 4 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE + WHEN TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyyy-dd-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- ymm-dd-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- ymm-yyyy-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- ydd-mm-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE + WHEN LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 
'F') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE WHEN analyzed_table.[target_column] IS NULL OR + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^0-9]%' OR + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-+0123456789.,]%' OR + LEN(CAST(analyzed_table.[target_column] AS VARCHAR)) <= 10 + AND( + TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 110) IS NOT NULL -- mm-dd-yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 101) IS NOT NULL -- mm/dd/yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 105) IS NOT NULL -- dd-mm-yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 103) IS NOT NULL -- dd/mm/yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 104) IS NOT NULL -- dd.mm.yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 107) IS NOT NULL -- Mon dd, yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 102) IS NOT NULL -- yyyy.mm.dd + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 111) IS NOT NULL -- yyyy/mm/dd + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 23) IS NOT NULL -- yyyy-mm-dd + ) + OR + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' + AND ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' + ) AND ( + ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + ) OR ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' + ) + ) AND ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' + ) + OR + TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS 
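-- TRY_CONVERT returns NULL instead of raising an error when the text cannot be parsed with the given style number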
VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyyy-dd-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- ymm-dd-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- ymm-yyyy-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- ydd-mm-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn + OR LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') + THEN 0 + WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) AS time_period, + CAST((DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)) AS DATETIME) AS time_period_utc + FROM [your_sql_server_database].[].[] AS analyzed_table + GROUP BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1), DATEADD(month, DATEDIFF(month, 0, analyzed_table.[date_column]), 0) + ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + + + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN 
COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END ) THEN 4 - WHEN COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyyy-dd-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- ymm-dd-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- ymm-yyyy-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- ydd-mm-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - THEN 1 + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 ELSE 0 END ) THEN 5 - WHEN COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 - WHEN 
COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( - CASE WHEN analyzed_table.[target_column] IS NULL OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^0-9]%' OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-+0123456789.,]%' OR - LEN(CAST(analyzed_table.[target_column] AS VARCHAR)) <= 10 - AND( - TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 110) IS NOT NULL -- mm-dd-yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 101) IS NOT NULL -- mm/dd/yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 105) IS NOT NULL -- dd-mm-yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 103) IS NOT NULL -- dd/mm/yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 104) IS NOT NULL -- dd.mm.yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 107) IS NOT NULL -- Mon dd, yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 102) IS NOT NULL -- yyyy.mm.dd - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 111) IS NOT NULL -- yyyy/mm/dd - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 23) IS NOT NULL -- yyyy-mm-dd - ) - OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' - AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' - ) AND ( - ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - ) OR ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' - ) - ) AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' - ) - OR - TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 
21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyyy-dd-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- ymm-dd-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- ymm-yyyy-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- ydd-mm-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> '' + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -30653,13 +32212,11 @@ spec: THEN 7 ELSE 8 END AS actual_value, - DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), 
MONTH(CAST(analyzed_table.[date_column] AS date)), 1) AS time_period, - CAST((DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)) AS DATETIME) AS time_period_utc - FROM [your_sql_server_database].[].[] AS analyzed_table - GROUP BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1), DATEADD(month, DATEDIFF(month, 0, analyzed_table.[date_column]), 0) - ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) - - + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -34111,6 +35668,181 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ 
lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN 
REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/datatype/detected-datatype-in-text.md b/docs/checks/column/datatype/detected-datatype-in-text.md index 647c142029..8c5195d876 100644 --- a/docs/checks/column/datatype/detected-datatype-in-text.md +++ b/docs/checks/column/datatype/detected-datatype-in-text.md @@ -3235,6 +3235,177 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN 
COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6629,19 +6800,19 @@ Expand the *Configure with data grouping* section to see additional examples for ``` - ??? example "Trino" + ??? example "Teradata" - === "Sensor template for Trino" + === "Sensor template for Teradata" ```sql+jinja - {% import '/dialects/trino.sql.jinja2' as lib with context -%} + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} SELECT CASE - WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -6650,61 +6821,60 @@ Expand the *Configure with data grouping* section to see additional examples for WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 2 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 3 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = - SUM( - CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') - THEN 1 - ELSE 0 - END - ) - THEN 5 + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -6712,28 +6882,22 @@ Expand the *Configure with data grouping* section to see additional examples for THEN 7 ELSE 8 END AS actual_value - {{- lib.render_data_grouping_projections_reference('analyzed_table') }} - {{- lib.render_time_dimension_projection_reference('analyzed_table') }} - FROM ( - SELECT - original_table.* - {{- lib.render_data_grouping_projections('original_table') }} - {{- lib.render_time_dimension_projection('original_table') }} - FROM {{ lib.render_target_table() }} original_table - ) analyzed_table + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause() -}} {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` - === "Rendered SQL for Trino" + === "Rendered SQL for Teradata" ```sql SELECT CASE - WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -6742,61 +6906,60 @@ Expand the *Configure with data grouping* section to see additional examples for WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 2 WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 3 WHEN COUNT(analyzed_table."target_column") = SUM( CASE 
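+ -- branch 4: datetime values - a date in any of the accepted layouts followed by hh:mm:ss and an optional am/pm marker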
- WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 WHEN COUNT(analyzed_table."target_column") = - SUM( - CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') - THEN 1 - ELSE 0 - END - ) - THEN 5 + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 WHEN COUNT(analyzed_table."target_column") = SUM( CASE WHEN analyzed_table."target_column" IS NULL OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(TRY_CAST(analyzed_table."target_column" AS VARCHAR)) <> '' + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -6804,29 +6967,210 @@ Expand the *Configure with data grouping* section to see additional examples for THEN 7 ELSE 8 END AS actual_value, - - analyzed_table.grouping_level_1, - - analyzed_table.grouping_level_2 - - FROM ( - SELECT - original_table.*, - original_table."country" AS grouping_level_1, - original_table."state" AS grouping_level_2 - FROM "your_trino_catalog".""."" original_table - ) analyzed_table + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table GROUP BY grouping_level_1, grouping_level_2 ORDER BY grouping_level_1, grouping_level_2 ``` - -___ - - -## daily detected datatype in text - + ??? example "Trino" -**Check description** + === "Sensor template for Trino" + ```sql+jinja + {% import '/dialects/trino.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 0 + WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections_reference('analyzed_table') }} + {{- lib.render_time_dimension_projection_reference('analyzed_table') }} + FROM ( + SELECT + original_table.* + {{- lib.render_data_grouping_projections('original_table') }} + {{- lib.render_time_dimension_projection('original_table') }} + FROM {{ lib.render_target_table() }} original_table + ) analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Trino" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 0 + WHEN TRIM(TRY_CAST(analyzed_table."target_column" AS VARCHAR)) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + + analyzed_table.grouping_level_1, + + analyzed_table.grouping_level_2 + + FROM ( + SELECT + original_table.*, + original_table."country" AS grouping_level_1, + original_table."state" AS grouping_level_2 + FROM "your_trino_catalog".""."" original_table + ) analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` + +___ + + +## daily detected datatype in text + + +**Check description** Detects the data type of text values stored in the column. The sensor returns the code of the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. Raises a data quality issue when the detected data type does not match the expected data type. 
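For illustration, a minimal sketch of the coding scheme in generic SQL (the `sample_values` table, its `text_value` column, and the `REGEXP_LIKE` predicate are assumptions for this example, not part of the generated sensor): a code is assigned only when *every* non-null value matches the same pattern family, so a single odd value pushes the whole column to code 8.

```sql
-- Hypothetical table: sample_values(text_value VARCHAR).
-- COUNT(text_value) skips NULLs, so the column is coded 1 (integers)
-- only if every non-null value matches the integer pattern.
SELECT
    CASE
        WHEN COUNT(text_value) = 0 THEN 0
        WHEN COUNT(text_value) =
             SUM(CASE WHEN REGEXP_LIKE(text_value, '^[-+]?\d+$') THEN 1 ELSE 0 END)
            THEN 1  -- all values are integers
        ELSE 8      -- the real sensor tests codes 2..7 before falling back to 8 (mixed)
    END AS actual_value
FROM sample_values;
```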
Stores the most recent captured value for each day when the data quality check was evaluated. @@ -10047,20 +10391,20 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` - ??? example "Trino" + ??? example "Teradata" - === "Sensor template for Trino" + === "Sensor template for Teradata" ```sql+jinja - {% import '/dialects/trino.sql.jinja2' as lib with context -%} + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} SELECT CASE - WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -10069,61 +10413,60 @@ spec: WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 2 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 3 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = - SUM( - CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') - THEN 1 - ELSE 0 - END - ) - THEN 5 + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + 
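+                                {# Teradata has no boolean REGEXP_LIKE, so every pattern test here is REGEXP_SUBSTR(...) IS NOT NULL over a CAST to VARCHAR(4096) #}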
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -10131,29 +10474,23 @@ spec: THEN 7 ELSE 8 END AS actual_value - {{- lib.render_data_grouping_projections_reference('analyzed_table') }} - {{- lib.render_time_dimension_projection_reference('analyzed_table') }} - FROM ( - SELECT - original_table.* - {{- lib.render_data_grouping_projections('original_table') }} - {{- lib.render_time_dimension_projection('original_table') }} - FROM {{ lib.render_target_table() }} original_table - ) analyzed_table + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause() -}} {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` - === "Rendered SQL for Trino" + === "Rendered SQL for Teradata" ```sql SELECT CASE - WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -10162,11 +10499,189 @@ spec: WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + FROM ""."" AS analyzed_table + ``` + ??? example "Trino" + + === "Sensor template for Trino" + + ```sql+jinja + {% import '/dialects/trino.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ 
lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 0 + WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections_reference('analyzed_table') }} + {{- lib.render_time_dimension_projection_reference('analyzed_table') }} + FROM ( + SELECT + original_table.* + {{- lib.render_data_grouping_projections('original_table') }} + {{- lib.render_time_dimension_projection('original_table') }} + FROM {{ lib.render_target_table() }} original_table + ) analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Trino" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN 
REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END ) THEN 2 WHEN COUNT(analyzed_table."target_column") = @@ -13442,19 +13957,19 @@ Expand the *Configure with data grouping* section to see additional examples for ``` - ??? example "Trino" + ??? example "Teradata" - === "Sensor template for Trino" + === "Sensor template for Teradata" ```sql+jinja - {% import '/dialects/trino.sql.jinja2' as lib with context -%} + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} SELECT CASE - WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -13463,61 +13978,60 @@ Expand the *Configure with data grouping* section to see additional examples for WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 2 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 3 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = - SUM( - CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') - THEN 1 - ELSE 0 - END - ) - THEN 5 + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = SUM( CASE WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR - REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + 
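+                                {# values that match none of the known patterns but are non-blank after TRIM count toward the plain-text code (7) #}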
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -13525,28 +14039,22 @@ Expand the *Configure with data grouping* section to see additional examples for THEN 7 ELSE 8 END AS actual_value - {{- lib.render_data_grouping_projections_reference('analyzed_table') }} - {{- lib.render_time_dimension_projection_reference('analyzed_table') }} - FROM ( - SELECT - original_table.* - {{- lib.render_data_grouping_projections('original_table') }} - {{- lib.render_time_dimension_projection('original_table') }} - FROM {{ lib.render_target_table() }} original_table - ) analyzed_table + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause() -}} {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` - === "Rendered SQL for Trino" + === "Rendered SQL for Teradata" ```sql SELECT CASE - WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL THEN 1 ELSE 0 END @@ -13555,55 +14063,235 @@ Expand the *Configure with data grouping* section to see additional examples for WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 2 WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 3 WHEN COUNT(analyzed_table."target_column") = SUM( 
CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 WHEN COUNT(analyzed_table."target_column") = - SUM( - CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') - THEN 1 - ELSE 0 - END - ) - THEN 5 + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 WHEN COUNT(analyzed_table."target_column") = SUM( CASE WHEN analyzed_table."target_column" IS NULL OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') OR - REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
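+                                -- CAST(... AS VARCHAR(4096)) with REGEXP_SUBSTR(...) IS NOT NULL is the Teradata counterpart of Trino's TRY_CAST with REGEXP_LIKE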
REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` + ??? 
example "Trino" + + === "Sensor template for Trino" + ```sql+jinja + {% import '/dialects/trino.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR {# Casting double to varchar in trino results in a scientific notation #} + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[-+]?\d+$') OR + REGEXP_LIKE(TRY_CAST({{ 
lib.render_target_column('analyzed_table') }} AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR + REGEXP_LIKE(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 0 + WHEN TRIM(TRY_CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR)) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections_reference('analyzed_table') }} + {{- lib.render_time_dimension_projection_reference('analyzed_table') }} + FROM ( + SELECT + original_table.* + {{- lib.render_data_grouping_projections('original_table') }} + {{- lib.render_time_dimension_projection('original_table') }} + FROM {{ lib.render_target_table() }} original_table + ) analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Trino" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]{1}[.][0-9]*E[-]?[0-9]+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?[0-9]*[.,]?[0-9]+$') + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[-+]?\d+$') OR + REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^[+-]?([0-9]*[.])[0-9]+$') OR REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') OR REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') OR REGEXP_LIKE(TRY_CAST(analyzed_table."target_column" AS VARCHAR), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') OR @@ -16729,128 +17417,299 @@ spec: OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 23) IS NOT NULL -- yyyy-mm-dd ) THEN 1 ELSE 0 - END + END + ) + THEN 3 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE + WHEN CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' + AND ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' + ) AND ( + ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + ) OR ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' + ) + ) AND ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' + ) + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE + WHEN TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR 
TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyy-dd-mm hh:mm:ss:nnn
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- mm-dd-yyyy hh:mm:ss:nnn
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- mm-yyyy-dd hh:mm:ss:nnn
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- dd-mm-yyyy hh:mm:ss:nnn
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- dd-yyyy-mm hh:mm:ss:nnn
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- mon dd yyyy hh:mm:ss:nnnAM
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- dd mon yyyy hh:mm:ss:nnn
+                    THEN 1
+                    ELSE 0
+                END
+            )
+            THEN 5
+        WHEN COUNT_BIG(analyzed_table.[target_column]) =
+            SUM(
+                CASE
+                    WHEN LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F')
+                        THEN 1
+                    ELSE 0
+                END
+            )
+            THEN 6
+        WHEN COUNT_BIG(analyzed_table.[target_column]) =
+            SUM(
+                CASE WHEN analyzed_table.[target_column] IS NULL OR
+                    CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^0-9]%' OR
+                    CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-+0123456789.,]%' OR
+                    LEN(CAST(analyzed_table.[target_column] AS VARCHAR)) <= 10
+                    AND(
+                        TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 110) IS NOT NULL -- mm-dd-yyyy
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 101) IS NOT NULL -- mm/dd/yyyy
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 105) IS NOT NULL -- dd-mm-yyyy
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 103) IS NOT NULL -- dd/mm/yyyy
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 104) IS NOT NULL -- dd.mm.yyyy
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 107) IS NOT NULL -- Mon dd, yyyy
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 102) IS NOT NULL -- yyyy.mm.dd
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 111) IS NOT NULL -- yyyy/mm/dd
+                        OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 23) IS NOT NULL -- yyyy-mm-dd
+                    )
+                    OR
+                    CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%'
+                    AND (
+                        CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%'
+                        OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%'
+                    ) AND (
+                        (
+                            CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %'
+                            OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %'
+                            OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %'
+                        ) OR (
+                            CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %'
+                            OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %'
+                            OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %'
+                        )
+                    ) AND (
+                        CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]'
+                        OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]'
+                        OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]'
+                        OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]'
+                        OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]'
+                        OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]'
+                    )
+                    OR
+                    TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyy-dd-mm hh:mm:ss:nnn
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- mm-dd-yyyy hh:mm:ss:nnn
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- mm-yyyy-dd hh:mm:ss:nnn
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- dd-mm-yyyy hh:mm:ss:nnn
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- dd-yyyy-mm hh:mm:ss:nnn
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- mon dd yyyy hh:mm:ss:nnnAM
+                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- dd mon yyyy hh:mm:ss:nnn
+                    OR LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F')
+                    THEN 0
+                    WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> ''
+                    THEN 1
+                    ELSE 0
+                END
+            )
+            THEN 7
+        ELSE 8
+    END AS actual_value
+    FROM [your_sql_server_database].[].[] AS analyzed_table
+    ```
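+
+    The `actual_value` computed above is a detected data type code. Reading the CASE branches: 1 is returned when every value matches the integer pattern, 2 for floats, 3 for dates, 4 for date-times, 5 for ISO-8601 style timestamps, 6 for booleans, 7 when every non-null value is a plain text that matches none of the patterns, and 8 when the column mixes several of these types.
+
+    The SQL Server variant probes each text with `TRY_CONVERT(date, ..., style)`, where the numeric style code selects the date layout to try and a failed parse returns `NULL` instead of raising an error. The snippet below is a minimal sketch of that probing idiom (illustrative only, not part of the generated sensor):
+
+    ```sql
+    SELECT
+        TRY_CONVERT(date, '2024-10-27', 23)  AS iso_date,   -- style 23 = yyyy-mm-dd, parses
+        TRY_CONVERT(date, '27/10/2024', 103) AS uk_date,    -- style 103 = dd/mm/yyyy, parses
+        TRY_CONVERT(date, 'not a date', 23)  AS no_date;    -- no matching layout, yields NULL
+    ```
+
+ ???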
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END ) THEN 3 - WHEN 
COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' - AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' - ) AND ( - ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - ) OR ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' - ) - ) AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' - ) + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 4 - WHEN COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS 
VARCHAR), 25) IS NOT NULL -- yyyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyyy-dd-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- ymm-dd-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- ymm-yyyy-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- ydd-mm-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - THEN 1 + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 ELSE 0 END ) THEN 5 - WHEN COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 - WHEN COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( - CASE WHEN analyzed_table.[target_column] IS NULL OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^0-9]%' OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-+0123456789.,]%' OR - LEN(CAST(analyzed_table.[target_column] AS VARCHAR)) <= 10 - AND( - TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 110) IS NOT NULL -- mm-dd-yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 101) IS NOT NULL -- mm/dd/yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 105) IS NOT NULL -- dd-mm-yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 103) IS NOT NULL -- dd/mm/yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 104) IS NOT NULL -- dd.mm.yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 107) IS NOT NULL -- Mon dd, yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 102) IS NOT NULL -- yyyy.mm.dd - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 111) IS NOT NULL -- yyyy/mm/dd - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 23) IS NOT NULL -- yyyy-mm-dd - ) - OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' - AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' - ) AND ( - ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - ) OR ( - 
CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' - ) - ) AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' - ) - OR - TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyyy-dd-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- ymm-dd-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- ymm-yyyy-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- ydd-mm-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> '' + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -16858,7 +17717,7 @@ spec: THEN 7 ELSE 8 END AS actual_value - FROM [your_sql_server_database].[].[] AS analyzed_table + FROM ""."" AS analyzed_table ``` ??? example "Trino" @@ -20254,6 +21113,179 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN 
COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -23758,6 +24790,181 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), 
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN 
REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR
+                        REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR
+                        REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL
+                    THEN 0
+                    WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> ''
+                    THEN 1
+                    ELSE 0
+                END
+            )
+            THEN 7
+        ELSE 8
+    END AS actual_value,
+    CAST(analyzed_table."date_column" AS DATE) AS time_period,
+    CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc
+    FROM ""."" AS analyzed_table
+    GROUP BY time_period, time_period_utc
+    ORDER BY time_period, time_period_utc
    ```

??? example "Trino"

@@ -27192,7 +28399,184 @@ Expand the *Configure with data grouping* section to see additional examples for
                    OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn
                    OR LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F')
                    THEN 0
-                    WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> ''
+                    WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> ''
+                    THEN 1
+                    ELSE 0
+                END
+            )
+            THEN 7
+        ELSE 8
+    END AS actual_value,
+        analyzed_table.[country] AS grouping_level_1,
+        analyzed_table.[state] AS grouping_level_2,
+        CAST(analyzed_table.[date_column] AS date) AS time_period,
+        CAST((CAST(analyzed_table.[date_column] AS date)) AS DATETIME) AS time_period_utc
+    FROM [your_sql_server_database].[].[] AS analyzed_table
+    GROUP BY analyzed_table.[country], analyzed_table.[state], CAST(analyzed_table.[date_column] AS date), CAST(analyzed_table.[date_column] AS date)
+    ORDER BY level_1, level_2, CAST(analyzed_table.[date_column] AS date)
+
+
+    ```
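+
+    The Teradata variant shown next relies on `REGEXP_SUBSTR`, which returns the first matching substring or `NULL` when nothing matches, so the templates test the result with `IS NOT NULL` to obtain a boolean regex predicate; the `CAST(... AS VARCHAR(4096))` first normalizes the probed column to a character type. A minimal sketch of that idiom (illustrative only, not part of the generated sensor):
+
+    ```sql
+    -- classifies a literal the same way the sensor classifies column values
+    SELECT CASE
+               WHEN REGEXP_SUBSTR('12345', '^[-+]?\d+$') IS NOT NULL THEN 'integer-like'
+               ELSE 'other'
+           END AS detected_type;
+    ```
+
+    An equivalent predicate could be written with Teradata's `REGEXP_SIMILAR(source, pattern) = 1`, but the `IS NOT NULL` form keeps the Teradata template structurally closest to the other dialects.
+
+ ???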
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN 
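+                        -- branch 4: every non-null value matches the date-time pattern below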
COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), 
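+                            -- ISO-8601 style timestamps, e.g. '2024-10-27T21:50:06.123+02:00' (optional T, fractional seconds, GMT/UTC or a numeric offset)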
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -27200,15 +28584,13 @@ Expand the *Configure with data grouping* section to see additional examples for THEN 7 ELSE 8 END AS actual_value, - analyzed_table.[country] AS grouping_level_1, - analyzed_table.[state] AS grouping_level_2, - CAST(analyzed_table.[date_column] AS date) AS time_period, - CAST((CAST(analyzed_table.[date_column] AS date)) AS DATETIME) AS time_period_utc - FROM [your_sql_server_database].[].[] AS analyzed_table - GROUP BY analyzed_table.[country], analyzed_table.[state], CAST(analyzed_table.[date_column] AS date), CAST(analyzed_table.[date_column] AS date) - ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) - - + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -30611,98 +31993,275 @@ spec: ) THEN 1 ELSE 0 - END + END + ) + THEN 4 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE + WHEN TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyy-dd-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- mm-dd-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- mm-yyyy-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- dd-mm-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- dd-yyyy-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- mon dd yyyy hh:mm:ss:nnnAM + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- dd mon yyyy hh:mm:ss:nnn (24h) + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE + WHEN LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 
'F') + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT_BIG(analyzed_table.[target_column]) = + SUM( + CASE WHEN analyzed_table.[target_column] IS NULL OR + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^0-9]%' OR + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-+0123456789.,]%' OR + LEN(CAST(analyzed_table.[target_column] AS VARCHAR)) <= 10 + AND( + TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 110) IS NOT NULL -- mm-dd-yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 101) IS NOT NULL -- mm/dd/yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 105) IS NOT NULL -- dd-mm-yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 103) IS NOT NULL -- dd/mm/yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 104) IS NOT NULL -- dd.mm.yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 107) IS NOT NULL -- Mon dd, yyyy + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 102) IS NOT NULL -- yyyy.mm.dd + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 111) IS NOT NULL -- yyyy/mm/dd + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 23) IS NOT NULL -- yyyy-mm-dd + ) + OR + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' + AND ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' + ) AND ( + ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' + ) OR ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' + ) + ) AND ( + CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' + OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' + ) + OR + TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS 
VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyy-dd-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- mm-dd-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- mm-yyyy-dd hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- dd-mm-yyyy hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- dd-yyyy-mm hh:mm:ss:nnn + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- mon dd yyyy hh:mm:ss:nnnAM + OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- dd mon yyyy hh:mm:ss:nnn (24h) + OR LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') + THEN 0 + WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) AS time_period, + CAST((DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)) AS DATETIME) AS time_period_utc + FROM [your_sql_server_database].[].[] AS analyzed_table + GROUP BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1), DATEADD(month, DATEDIFF(month, 0, analyzed_table.[date_column]), 0) + ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + + + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + 
REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN 
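+                        -- the CASE branches are evaluated top-down, so the first data type that covers every non-null value wins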
COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END ) THEN 4 - WHEN COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyyy-dd-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- ymm-dd-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- ymm-yyyy-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- ydd-mm-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - THEN 1 + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 ELSE 0 END ) THEN 5 - WHEN COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( CASE - WHEN LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 1 ELSE 0 - END + END ) THEN 6 - WHEN 
COUNT_BIG(analyzed_table.[target_column]) = + WHEN COUNT(analyzed_table."target_column") = SUM( - CASE WHEN analyzed_table.[target_column] IS NULL OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^0-9]%' OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-+0123456789.,]%' OR - LEN(CAST(analyzed_table.[target_column] AS VARCHAR)) <= 10 - AND( - TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 110) IS NOT NULL -- mm-dd-yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 101) IS NOT NULL -- mm/dd/yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 105) IS NOT NULL -- dd-mm-yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 103) IS NOT NULL -- dd/mm/yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 104) IS NOT NULL -- dd.mm.yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 107) IS NOT NULL -- Mon dd, yyyy - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 102) IS NOT NULL -- yyyy.mm.dd - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 111) IS NOT NULL -- yyyy/mm/dd - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 23) IS NOT NULL -- yyyy-mm-dd - ) - OR - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) NOT LIKE '%[^-0-9./:APMapm ]%' - AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]0[1-9][-/.]%' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[-/.]1[0-2][-/.]%' - ) AND ( - ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '0[1-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[12][0-9][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '3[01][-/.]__[-/.][0-9][0-9][0-9][0-9] %' - ) OR ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]0[1-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.][12][0-9] %' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '[0-9][0-9][0-9][0-9][-/.]__[-/.]3[01] %' - ) - ) AND ( - CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[0][0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[1][0-2][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9] [AaPp][Mm]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[01][0-9][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '%[2][0-3][:][0-5][0-9][:][0-5][0-9]' - OR CAST(analyzed_table.[target_column] AS NVARCHAR(MAX)) LIKE '% [0-9][:][0-5][0-9][:][0-5][0-9]' - ) - OR - TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 120) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 121) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 126) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 127) IS NOT NULL -- yyyy-mm-dd T hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 20) IS NOT NULL -- yyyy-mm-dd hh:mm:ss - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 
21) IS NOT NULL -- yyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 25) IS NOT NULL -- yyyyy-mm-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 26) IS NOT NULL -- yyyyy-dd-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 27) IS NOT NULL -- ymm-dd-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 28) IS NOT NULL -- ymm-yyyy-dd hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 29) IS NOT NULL -- ydd-mm-yyyy hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 30) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 109) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR TRY_CONVERT(date, CAST(analyzed_table.[target_column] AS VARCHAR), 113) IS NOT NULL -- ydd-yyyy-mm hh:mm:ss:nnn - OR LOWER(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) IN ('TRUE', 'FALSE', 'YES', 'NO', 'Y', 'N', 'T', 'F') + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL THEN 0 - WHEN TRIM(CAST(analyzed_table.[target_column] AS NVARCHAR(MAX))) <> '' + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' THEN 1 ELSE 0 END @@ -30710,13 +32269,11 @@ spec: THEN 7 ELSE 8 END AS actual_value, - DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), 
MONTH(CAST(analyzed_table.[date_column] AS date)), 1) AS time_period, - CAST((DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)) AS DATETIME) AS time_period_utc - FROM [your_sql_server_database].[].[] AS analyzed_table - GROUP BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1), DATEADD(month, DATEDIFF(month, 0, analyzed_table.[date_column]), 0) - ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) - - + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -34168,6 +35725,181 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), 
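+                            -- the pattern on the next line accepts dd/mm/yyyy, dd-mm-yyyy, dd.mm.yyyy, yyyy/mm/dd, yyyy-mm-dd and yyyy.mm.dd dates followed by hh:mm:ss and an optional am/pm marker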
'^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ 
lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN NULL + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN 
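+                        -- branch 6: every non-null value is a boolean-like literal (true/false, yes/no, y/n, t/f in either case)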
REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT(analyzed_table."target_column") = + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST(analyzed_table."target_column" AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/datetime/date-in-range-percent.md b/docs/checks/column/datetime/date-in-range-percent.md index 99bf34824b..4fe6be55b6 100644 --- a/docs/checks/column/datetime/date-in-range-percent.md +++ b/docs/checks/column/datetime/date-in-range-percent.md @@ -822,6 +822,45 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN CAST(analyzed_table."target_column" AS DATE) >= '1900-01-02' AND CAST(analyzed_table."target_column" AS DATE) <= '2099-12-30' THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1665,6 +1704,47 @@ Expand the *Configure with data grouping* section to see additional examples for , level_1, level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN CAST(analyzed_table."target_column" AS DATE) >= '1900-01-02' AND CAST(analyzed_table."target_column" AS DATE) <= '2099-12-30' THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2535,6 +2615,45 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN CAST(analyzed_table."target_column" AS DATE) >= '1900-01-02' AND CAST(analyzed_table."target_column" AS DATE) <= '2099-12-30' THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3379,6 +3498,47 @@ Expand the *Configure with data grouping* section to see additional examples for , level_1, level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN CAST(analyzed_table."target_column" AS DATE) >= '1900-01-02' AND CAST(analyzed_table."target_column" AS DATE) <= '2099-12-30' THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4249,6 +4409,45 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN CAST(analyzed_table."target_column" AS DATE) >= '1900-01-02' AND CAST(analyzed_table."target_column" AS DATE) <= '2099-12-30' THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5093,6 +5292,47 @@ Expand the *Configure with data grouping* section to see additional examples for , level_1, level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN CAST(analyzed_table."target_column" AS DATE) >= '1900-01-02' AND CAST(analyzed_table."target_column" AS DATE) <= '2099-12-30' THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6046,6 +6286,49 @@ spec: FROM [your_sql_server_database].[].[] AS analyzed_table GROUP BY CAST(analyzed_table.[date_column] AS date), CAST(analyzed_table.[date_column] AS date)ORDER BY CAST(.[date_column] AS date) ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN CAST(analyzed_table."target_column" AS DATE) >= '1900-01-02' AND CAST(analyzed_table."target_column" AS DATE) <= '2099-12-30' THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6946,6 +7229,49 @@ Expand the *Configure with data grouping* section to see additional examples for FROM [your_sql_server_database].[].[] AS analyzed_table GROUP BY analyzed_table.[country], analyzed_table.[state], CAST(analyzed_table.[date_column] AS date), CAST(analyzed_table.[date_column] AS date)ORDER BY CAST(.[date_column] AS date)level_1, level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN CAST(analyzed_table."target_column" AS DATE) >= '1900-01-02' AND CAST(analyzed_table."target_column" AS DATE) <= '2099-12-30' THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -7903,6 +8229,49 @@ spec: FROM [your_sql_server_database].[].[] AS analyzed_table GROUP BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1), DATEADD(month, DATEDIFF(month, 0, analyzed_table.[date_column]), 0)ORDER BY DATEFROMPARTS(YEAR(CAST(.[date_column] AS date)), MONTH(CAST(.[date_column] AS date)), 1) ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN CAST(analyzed_table."target_column" AS DATE) >= '1900-01-02' AND CAST(analyzed_table."target_column" AS DATE) <= '2099-12-30' THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -8803,6 +9172,49 @@ Expand the *Configure with data grouping* section to see additional examples for FROM [your_sql_server_database].[].[] AS analyzed_table GROUP BY analyzed_table.[country], analyzed_table.[state], DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1), DATEADD(month, DATEDIFF(month, 0, analyzed_table.[date_column]), 0)ORDER BY DATEFROMPARTS(YEAR(CAST(.[date_column] AS date)), MONTH(CAST(.[date_column] AS date)), 1)level_1, level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN CAST(analyzed_table."target_column" AS DATE) >= '1900-01-02' AND CAST(analyzed_table."target_column" AS DATE) <= '2099-12-30' THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/datetime/date-values-in-future-percent.md b/docs/checks/column/datetime/date-values-in-future-percent.md index ca25c66fa8..3f4368cb37 100644 --- a/docs/checks/column/datetime/date-values-in-future-percent.md +++ b/docs/checks/column/datetime/date-values-in-future-percent.md @@ -991,6 +991,57 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_DATETIME() + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + CAST(analyzed_table."target_column" AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL 0 SECOND + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2019,6 +2070,59 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_DATETIME() + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + CAST(analyzed_table."target_column" AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL 0 SECOND + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3073,6 +3177,57 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_DATETIME() + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + CAST(analyzed_table."target_column" AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL 0 SECOND + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4102,6 +4257,59 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_DATETIME() + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + CAST(analyzed_table."target_column" AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL 0 SECOND + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5156,6 +5364,57 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_DATETIME() + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + CAST(analyzed_table."target_column" AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL 0 SECOND + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6185,6 +6444,59 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_DATETIME() + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + CAST(analyzed_table."target_column" AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL 0 SECOND + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -7324,6 +7636,61 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_DATETIME() + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + CAST(analyzed_table."target_column" AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL 0 SECOND + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8410,6 +8777,61 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_DATETIME() + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + CAST(analyzed_table."target_column" AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL 0 SECOND + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -9553,6 +9975,61 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_DATETIME() + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + CAST(analyzed_table."target_column" AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL 0 SECOND + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -10639,6 +11116,61 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_DATETIME() + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + CAST(analyzed_table."target_column" AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL 0 SECOND + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/datetime/text-match-date-format-percent.md b/docs/checks/column/datetime/text-match-date-format-percent.md index feb8a5bfa6..13e240fa51 100644 --- a/docs/checks/column/datetime/text-match-date-format-percent.md +++ b/docs/checks/column/datetime/text-match-date-format-percent.md @@ -858,6 +858,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1744,6 +1787,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2656,6 +2744,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3543,6 +3674,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4455,6 +4631,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5342,6 +5561,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6339,6 +6603,53 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7283,6 +7594,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8284,6 +8642,53 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9228,6 +9633,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/integrity/lookup-key-found-percent.md b/docs/checks/column/integrity/lookup-key-found-percent.md index 38ba954c49..74a7edb5bb 100644 --- a/docs/checks/column/integrity/lookup-key-found-percent.md +++ b/docs/checks/column/integrity/lookup-key-found-percent.md @@ -808,6 +808,45 @@ spec: LEFT OUTER JOIN public.dim_customer AS foreign_table ON analyzed_table.[target_column] = foreign_table.[customer_id] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1639,6 +1678,47 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2496,6 +2576,45 @@ spec: LEFT OUTER JOIN public.dim_customer AS foreign_table ON analyzed_table.[target_column] = foreign_table.[customer_id] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3328,6 +3447,47 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4185,6 +4345,45 @@ spec: LEFT OUTER JOIN public.dim_customer AS foreign_table ON analyzed_table.[target_column] = foreign_table.[customer_id] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5017,6 +5216,47 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5959,6 +6199,49 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6848,6 +7131,49 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7794,6 +8120,49 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -8683,6 +9052,49 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/integrity/lookup-key-not-found.md b/docs/checks/column/integrity/lookup-key-not-found.md index 6bd9e05e7e..22a9ad45e3 100644 --- a/docs/checks/column/integrity/lookup-key-not-found.md +++ b/docs/checks/column/integrity/lookup-key-not-found.md @@ -841,6 +841,45 @@ spec: LEFT OUTER JOIN public.dim_customer AS foreign_table ON analyzed_table.[target_column] = foreign_table.[customer_id] ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1706,6 +1745,47 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2597,6 +2677,45 @@ spec: LEFT OUTER JOIN public.dim_customer AS foreign_table ON analyzed_table.[target_column] = foreign_table.[customer_id] ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3463,6 +3582,47 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4354,6 +4514,45 @@ spec: LEFT OUTER JOIN public.dim_customer AS foreign_table ON analyzed_table.[target_column] = foreign_table.[customer_id] ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5220,6 +5419,47 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6196,6 +6436,49 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7119,6 +7402,49 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8099,6 +8425,49 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9022,6 +9391,49 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN foreign_table."customer_id" IS NULL AND analyzed_table."target_column" IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + LEFT OUTER JOIN public.dim_customer AS foreign_table + ON analyzed_table."target_column" = foreign_table."customer_id" + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" diff --git a/docs/checks/column/nulls/empty-column-found.md b/docs/checks/column/nulls/empty-column-found.md index 73aa021712..8c9e8e45c8 100644 --- a/docs/checks/column/nulls/empty-column-found.md +++ b/docs/checks/column/nulls/empty-column-found.md @@ -535,6 +535,30 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1087,6 +1111,32 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1653,6 +1703,30 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2206,6 +2280,32 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2772,6 +2872,30 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3325,6 +3449,32 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3976,6 +4126,34 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4586,6 +4764,34 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5241,6 +5447,34 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5851,6 +6085,34 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/nulls/not-nulls-count.md b/docs/checks/column/nulls/not-nulls-count.md index 0193ba7d9e..dddd207555 100644 --- a/docs/checks/column/nulls/not-nulls-count.md +++ b/docs/checks/column/nulls/not-nulls-count.md @@ -546,6 +546,30 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1099,6 +1123,32 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1678,6 +1728,30 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2232,6 +2306,32 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2811,6 +2911,30 @@ spec: COUNT_BIG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3365,6 +3489,32 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? 
example "Trino" @@ -4029,6 +4179,34 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4640,6 +4818,34 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5308,6 +5514,34 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -5919,6 +6153,34 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/nulls/not-nulls-percent.md b/docs/checks/column/nulls/not-nulls-percent.md index e5b50f5ffb..116c61a91b 100644 --- a/docs/checks/column/nulls/not-nulls-percent.md +++ b/docs/checks/column/nulls/not-nulls-percent.md @@ -623,6 +623,34 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT(analyzed_table."target_column") / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1258,6 +1286,36 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT(analyzed_table."target_column") / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1919,6 +1977,34 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT(analyzed_table."target_column") / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2555,6 +2641,36 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT(analyzed_table."target_column") / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3216,6 +3332,34 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT(analyzed_table."target_column") / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3852,6 +3996,36 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT(analyzed_table."target_column") / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4598,6 +4772,38 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT(analyzed_table."target_column") / COUNT(*) + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5291,6 +5497,38 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT(analyzed_table."target_column") / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6041,6 +6279,38 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT(analyzed_table."target_column") / COUNT(*) + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6734,6 +7004,38 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT(analyzed_table."target_column") / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/nulls/nulls-count.md b/docs/checks/column/nulls/nulls-count.md index 5dd2e74d1b..bcbf6aac59 100644 --- a/docs/checks/column/nulls/nulls-count.md +++ b/docs/checks/column/nulls/nulls-count.md @@ -692,6 +692,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1401,6 +1433,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2136,6 +2202,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2846,6 +2944,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3581,6 +3713,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4291,6 +4455,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5111,6 +5309,42 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5878,6 +6112,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" @@ -6702,6 +6972,42 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7469,6 +7775,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/nulls/nulls-percent-anomaly.md b/docs/checks/column/nulls/nulls-percent-anomaly.md index b0e11880e8..b811ae0bac 100644 --- a/docs/checks/column/nulls/nulls-percent-anomaly.md +++ b/docs/checks/column/nulls/nulls-percent-anomaly.md @@ -784,6 +784,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1591,6 +1629,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2424,6 +2502,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3232,6 +3348,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4150,6 +4306,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5015,6 +5213,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/nulls/nulls-percent-change-1-day.md b/docs/checks/column/nulls/nulls-percent-change-1-day.md index e12c0486c9..81f108f4a0 100644 --- a/docs/checks/column/nulls/nulls-percent-change-1-day.md +++ b/docs/checks/column/nulls/nulls-percent-change-1-day.md @@ -788,6 +788,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1598,6 +1636,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2434,6 +2512,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3245,6 +3361,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4166,6 +4322,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5034,6 +5232,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/nulls/nulls-percent-change-30-days.md b/docs/checks/column/nulls/nulls-percent-change-30-days.md index ae9738f025..eb37c64143 100644 --- a/docs/checks/column/nulls/nulls-percent-change-30-days.md +++ b/docs/checks/column/nulls/nulls-percent-change-30-days.md @@ -788,6 +788,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1598,6 +1636,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2434,6 +2512,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3245,6 +3361,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4166,6 +4322,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5034,6 +5232,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/nulls/nulls-percent-change-7-days.md b/docs/checks/column/nulls/nulls-percent-change-7-days.md index c652535b83..e1d31c08e9 100644 --- a/docs/checks/column/nulls/nulls-percent-change-7-days.md +++ b/docs/checks/column/nulls/nulls-percent-change-7-days.md @@ -788,6 +788,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1598,6 +1636,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2434,6 +2512,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3245,6 +3361,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4166,6 +4322,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5034,6 +5232,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/nulls/nulls-percent-change.md b/docs/checks/column/nulls/nulls-percent-change.md index d38e2df373..25252337d2 100644 --- a/docs/checks/column/nulls/nulls-percent-change.md +++ b/docs/checks/column/nulls/nulls-percent-change.md @@ -785,6 +785,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1592,6 +1630,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2425,6 +2503,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3233,6 +3349,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4151,6 +4307,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5016,6 +5214,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/nulls/nulls-percent.md b/docs/checks/column/nulls/nulls-percent.md index 09c89d2521..babfdaa2da 100644 --- a/docs/checks/column/nulls/nulls-percent.md +++ b/docs/checks/column/nulls/nulls-percent.md @@ -785,6 +785,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1592,6 +1630,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2425,6 +2503,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3233,6 +3349,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4066,6 +4222,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4874,6 +5068,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5792,6 +6026,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -6657,6 +6933,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7579,6 +7897,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8444,6 +8804,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/integer-in-range-percent.md b/docs/checks/column/numeric/integer-in-range-percent.md index d5c35877db..1f3e465c26 100644 --- a/docs/checks/column/numeric/integer-in-range-percent.md +++ b/docs/checks/column/numeric/integer-in-range-percent.md @@ -785,6 +785,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1593,6 +1631,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2427,6 +2505,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3236,6 +3352,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4070,6 +4226,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4879,6 +5073,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5798,6 +6032,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6664,6 +6940,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7587,6 +7905,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8453,6 +8813,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/invalid-latitude.md b/docs/checks/column/numeric/invalid-latitude.md index efc13e3bd4..64e5dcf0b2 100644 --- a/docs/checks/column/numeric/invalid-latitude.md +++ b/docs/checks/column/numeric/invalid-latitude.md @@ -694,6 +694,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -90.0 OR analyzed_table."target_column" > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1405,6 +1437,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -90.0 OR analyzed_table."target_column" > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2142,6 +2208,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -90.0 OR analyzed_table."target_column" > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2854,6 +2952,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -90.0 OR analyzed_table."target_column" > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3591,6 +3723,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -90.0 OR analyzed_table."target_column" > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4303,6 +4467,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -90.0 OR analyzed_table."target_column" > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5125,6 +5323,42 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -90.0 OR analyzed_table."target_column" > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5894,6 +6128,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -90.0 OR analyzed_table."target_column" > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6720,6 +6990,42 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -90.0 OR analyzed_table."target_column" > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7489,6 +7795,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -90.0 OR analyzed_table."target_column" > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/invalid-longitude.md b/docs/checks/column/numeric/invalid-longitude.md index 3f1e6be70e..434e801095 100644 --- a/docs/checks/column/numeric/invalid-longitude.md +++ b/docs/checks/column/numeric/invalid-longitude.md @@ -692,6 +692,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -180.0 OR analyzed_table."target_column" > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1401,6 +1433,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -180.0 OR analyzed_table."target_column" > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2136,6 +2202,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -180.0 OR analyzed_table."target_column" > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2846,6 +2944,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -180.0 OR analyzed_table."target_column" > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3581,6 +3713,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -180.0 OR analyzed_table."target_column" > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4291,6 +4455,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -180.0 OR analyzed_table."target_column" > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5111,6 +5309,42 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -180.0 OR analyzed_table."target_column" > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5878,6 +6112,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -180.0 OR analyzed_table."target_column" > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6702,6 +6972,42 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -180.0 OR analyzed_table."target_column" > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7469,6 +7775,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < -180.0 OR analyzed_table."target_column" > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/max-in-range.md b/docs/checks/column/numeric/max-in-range.md index 465c63b2ab..f8d2905499 100644 --- a/docs/checks/column/numeric/max-in-range.md +++ b/docs/checks/column/numeric/max-in-range.md @@ -529,6 +529,28 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1065,6 +1087,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1627,6 +1673,28 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2164,6 +2232,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2726,6 +2818,28 @@ spec: MAX(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3263,6 +3377,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? 
example "Trino" @@ -3910,6 +4048,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4504,6 +4668,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5155,6 +5345,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -5749,6 +5965,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/mean-in-range.md b/docs/checks/column/numeric/mean-in-range.md index 4b3915b61b..e70880ffa3 100644 --- a/docs/checks/column/numeric/mean-in-range.md +++ b/docs/checks/column/numeric/mean-in-range.md @@ -529,6 +529,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1065,6 +1087,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? 
example "Trino" @@ -1627,6 +1673,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2164,6 +2232,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2726,6 +2818,28 @@ spec: AVG(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3263,6 +3377,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3910,6 +4048,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4504,6 +4668,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5155,6 +5345,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5749,6 +5965,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/median-in-range.md b/docs/checks/column/numeric/median-in-range.md index 2535db753e..2fe8327f03 100644 --- a/docs/checks/column/numeric/median-in-range.md +++ b/docs/checks/column/numeric/median-in-range.md @@ -797,6 +797,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1632,6 +1657,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2496,6 +2548,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3332,6 +3409,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -4196,6 +4300,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5032,6 +5161,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5990,6 +6146,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -6909,6 +7094,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -7873,6 +8087,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -8792,6 +9035,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.5) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/numeric/min-in-range.md b/docs/checks/column/numeric/min-in-range.md index d7a52be399..ada673c236 100644 --- a/docs/checks/column/numeric/min-in-range.md +++ b/docs/checks/column/numeric/min-in-range.md @@ -529,6 +529,28 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1065,6 +1087,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1627,6 +1673,28 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2164,6 +2232,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2726,6 +2818,28 @@ spec: MIN(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3263,6 +3377,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? 
example "Trino" @@ -3910,6 +4048,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4504,6 +4668,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5155,6 +5345,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -5749,6 +5965,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/negative-values-percent.md b/docs/checks/column/numeric/negative-values-percent.md index 0a4577468e..6ba8d90b40 100644 --- a/docs/checks/column/numeric/negative-values-percent.md +++ b/docs/checks/column/numeric/negative-values-percent.md @@ -783,6 +783,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1589,6 +1627,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2421,6 +2499,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3228,6 +3344,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4060,6 +4216,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4867,6 +5061,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5784,6 +6018,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6648,6 +6924,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7569,6 +7887,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8433,6 +8793,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/negative-values.md b/docs/checks/column/numeric/negative-values.md index b427e3bad8..44b808cb55 100644 --- a/docs/checks/column/numeric/negative-values.md +++ b/docs/checks/column/numeric/negative-values.md @@ -695,6 +695,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1408,6 +1440,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2147,6 +2213,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2861,6 +2959,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3600,6 +3732,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4314,6 +4478,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5138,6 +5336,42 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5909,6 +6143,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6737,6 +7007,42 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7508,6 +7814,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/non-negative-values-percent.md b/docs/checks/column/numeric/non-negative-values-percent.md index 5fd0ec41b2..508ff7e78f 100644 --- a/docs/checks/column/numeric/non-negative-values-percent.md +++ b/docs/checks/column/numeric/non-negative-values-percent.md @@ -784,6 +784,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1591,6 +1629,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2424,6 +2502,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3232,6 +3348,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4065,6 +4221,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4873,6 +5067,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5791,6 +6025,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6656,6 +6932,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7578,6 +7896,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8443,6 +8803,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/non-negative-values.md b/docs/checks/column/numeric/non-negative-values.md index 6a1297ed22..ef5fe45ffd 100644 --- a/docs/checks/column/numeric/non-negative-values.md +++ b/docs/checks/column/numeric/non-negative-values.md @@ -693,6 +693,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1404,6 +1436,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2141,6 +2207,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2853,6 +2951,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3590,6 +3722,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4302,6 +4466,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5124,6 +5322,42 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5893,6 +6127,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6719,6 +6989,42 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7488,6 +7794,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/number-above-max-value-percent.md b/docs/checks/column/numeric/number-above-max-value-percent.md index 6a4876f20a..0b8e6d7083 100644 --- a/docs/checks/column/numeric/number-above-max-value-percent.md +++ b/docs/checks/column/numeric/number-above-max-value-percent.md @@ -786,6 +786,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1595,6 +1633,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2430,6 +2508,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3240,6 +3356,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4075,6 +4231,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4885,6 +5079,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5805,6 +6039,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6672,6 +6948,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7596,6 +7914,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8463,6 +8823,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/number-above-max-value.md b/docs/checks/column/numeric/number-above-max-value.md index 139e3de18c..85df1eb30e 100644 --- a/docs/checks/column/numeric/number-above-max-value.md +++ b/docs/checks/column/numeric/number-above-max-value.md @@ -702,6 +702,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1421,6 +1453,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2166,6 +2232,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2886,6 +2984,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3631,6 +3763,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4351,6 +4515,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5181,6 +5379,42 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5958,6 +6192,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6792,6 +7062,42 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7569,6 +7875,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" > 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/number-below-min-value-percent.md b/docs/checks/column/numeric/number-below-min-value-percent.md index 160d1bf060..c234c708b5 100644 --- a/docs/checks/column/numeric/number-below-min-value-percent.md +++ b/docs/checks/column/numeric/number-below-min-value-percent.md @@ -786,6 +786,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1595,6 +1633,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2430,6 +2508,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3240,6 +3356,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4075,6 +4231,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4885,6 +5079,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5805,6 +6039,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6672,6 +6948,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7596,6 +7914,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8463,6 +8823,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/number-below-min-value.md b/docs/checks/column/numeric/number-below-min-value.md index 98b07b6844..be29242f39 100644 --- a/docs/checks/column/numeric/number-below-min-value.md +++ b/docs/checks/column/numeric/number-below-min-value.md @@ -702,6 +702,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1421,6 +1453,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2166,6 +2232,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2886,6 +2984,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3631,6 +3763,38 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4351,6 +4515,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5181,6 +5379,42 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5958,6 +6192,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6792,6 +7062,42 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7569,6 +7875,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" < 0.0 THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/number-in-range-percent.md b/docs/checks/column/numeric/number-in-range-percent.md index 04860fca8b..41fb58152d 100644 --- a/docs/checks/column/numeric/number-in-range-percent.md +++ b/docs/checks/column/numeric/number-in-range-percent.md @@ -784,6 +784,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1591,6 +1629,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2424,6 +2502,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3232,6 +3348,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4065,6 +4221,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4873,6 +5067,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5791,6 +6025,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6656,6 +6932,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7578,6 +7896,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8443,6 +8803,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= AND analyzed_table."target_column" <= THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/percentile-10-in-range.md b/docs/checks/column/numeric/percentile-10-in-range.md index 1e7fd85295..e5d55c5efb 100644 --- a/docs/checks/column/numeric/percentile-10-in-range.md +++ b/docs/checks/column/numeric/percentile-10-in-range.md @@ -797,6 +797,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.1) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1632,6 +1657,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.1) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2496,6 +2548,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.1) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3332,6 +3409,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.1) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4196,6 +4300,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.1) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5032,6 +5161,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.1) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5990,6 +6146,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.1) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6909,6 +7094,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.1) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -7873,6 +8087,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.1) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -8792,6 +9035,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.1) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/numeric/percentile-25-in-range.md b/docs/checks/column/numeric/percentile-25-in-range.md index 472549bd25..b23ae94e00 100644 --- a/docs/checks/column/numeric/percentile-25-in-range.md +++ b/docs/checks/column/numeric/percentile-25-in-range.md @@ -797,6 +797,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.25) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1632,6 +1657,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.25) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2496,6 +2548,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.25) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3332,6 +3409,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.25) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4196,6 +4300,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.25) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5032,6 +5161,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.25) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5990,6 +6146,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.25) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6909,6 +7094,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.25) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -7873,6 +8087,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.25) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -8792,6 +9035,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.25) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/numeric/percentile-75-in-range.md b/docs/checks/column/numeric/percentile-75-in-range.md index 16ffa756e9..6b68ebf880 100644 --- a/docs/checks/column/numeric/percentile-75-in-range.md +++ b/docs/checks/column/numeric/percentile-75-in-range.md @@ -797,6 +797,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.75) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1632,6 +1657,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.75) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2496,6 +2548,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.75) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3332,6 +3409,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.75) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4196,6 +4300,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.75) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5032,6 +5161,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.75) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5990,6 +6146,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.75) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6909,6 +7094,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.75) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -7873,6 +8087,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.75) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -8792,6 +9035,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.75) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/numeric/percentile-90-in-range.md b/docs/checks/column/numeric/percentile-90-in-range.md index bbe592c08a..d31ea3e61c 100644 --- a/docs/checks/column/numeric/percentile-90-in-range.md +++ b/docs/checks/column/numeric/percentile-90-in-range.md @@ -797,6 +797,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.9) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1632,6 +1657,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.9) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2496,6 +2548,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.9) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3332,6 +3409,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.9) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4196,6 +4300,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.9) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5032,6 +5161,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.9) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5990,6 +6146,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.9) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6909,6 +7094,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.9) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -7873,6 +8087,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT(0.9) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -8792,6 +9035,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT(0.9) + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/numeric/percentile-in-range.md b/docs/checks/column/numeric/percentile-in-range.md index 2ed4cf4c40..c34246f34c 100644 --- a/docs/checks/column/numeric/percentile-in-range.md +++ b/docs/checks/column/numeric/percentile-in-range.md @@ -795,6 +795,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT() + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1628,6 +1653,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT() + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2490,6 +2542,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT() + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3324,6 +3401,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT() + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4186,6 +4290,31 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT() + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value + FROM ""."" analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5020,6 +5149,33 @@ Expand the *Configure with data grouping* section to see additional examples for ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table) AS nested_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT() + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5976,6 +6132,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT() + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6893,6 +7078,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT() + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -7855,6 +8069,35 @@ spec: GROUP BY nested_table.[time_period], nested_table.[time_period_utc] ORDER BY nested_table.[time_period], nested_table.[time_period_utc] ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + PERCENTILE_CONT() + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -8772,6 +9015,35 @@ Expand the *Configure with data grouping* section to see additional examples for analyzed_table.[country] AS grouping_level_1, analyzed_table.[state] AS grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + PERCENTILE_CONT() + WITHIN GROUP (ORDER BY analyzed_table."target_column" * 1.0) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/column/numeric/population-stddev-in-range.md b/docs/checks/column/numeric/population-stddev-in-range.md index a975a43504..0276efda8c 100644 --- a/docs/checks/column/numeric/population-stddev-in-range.md +++ b/docs/checks/column/numeric/population-stddev-in-range.md @@ -529,6 +529,28 @@ spec: STDEVP(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + STDDEV_POP(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1065,6 +1087,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + STDDEV_POP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1627,6 +1673,28 @@ spec: STDEVP(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + STDDEV_POP(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2164,6 +2232,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + STDDEV_POP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2726,6 +2818,28 @@ spec: STDEVP(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + STDDEV_POP(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3263,6 +3377,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + STDDEV_POP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3910,6 +4048,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + STDDEV_POP(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4504,6 +4668,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + STDDEV_POP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5155,6 +5345,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + STDDEV_POP(analyzed_table."target_column") AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5749,6 +5965,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + STDDEV_POP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/population-variance-in-range.md b/docs/checks/column/numeric/population-variance-in-range.md index ef2692ea6c..2ca0521901 100644 --- a/docs/checks/column/numeric/population-variance-in-range.md +++ b/docs/checks/column/numeric/population-variance-in-range.md @@ -529,6 +529,28 @@ spec: VARP(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + VAR_POP(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1065,6 +1087,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + VAR_POP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1627,6 +1673,28 @@ spec: VARP(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + VAR_POP(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2164,6 +2232,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + VAR_POP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2726,6 +2818,28 @@ spec: VARP(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + VAR_POP(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3263,6 +3377,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + VAR_POP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3910,6 +4048,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + VAR_POP(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4504,6 +4668,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + VAR_POP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5155,6 +5345,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + VAR_POP(analyzed_table."target_column") AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5749,6 +5965,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + VAR_POP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" diff --git a/docs/checks/column/numeric/sample-stddev-in-range.md b/docs/checks/column/numeric/sample-stddev-in-range.md index 54a43c2c65..8db98637bd 100644 --- a/docs/checks/column/numeric/sample-stddev-in-range.md +++ b/docs/checks/column/numeric/sample-stddev-in-range.md @@ -529,6 +529,28 @@ spec: STDEV(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + STDDEV_SAMP(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1065,6 +1087,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + STDDEV_SAMP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1627,6 +1673,28 @@ spec: STDEV(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + STDDEV_SAMP(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2164,6 +2232,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + STDDEV_SAMP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2726,6 +2818,28 @@ spec: STDEV(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + STDDEV_SAMP(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3263,6 +3377,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + STDDEV_SAMP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3910,6 +4048,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + STDDEV_SAMP(analyzed_table."target_column") AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4504,6 +4668,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + STDDEV_SAMP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5155,6 +5345,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + STDDEV_SAMP(analyzed_table."target_column") AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5749,6 +5965,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + STDDEV_SAMP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/numeric/sample-variance-in-range.md b/docs/checks/column/numeric/sample-variance-in-range.md index 8d89ae4d41..a24c3bb85f 100644 --- a/docs/checks/column/numeric/sample-variance-in-range.md +++ b/docs/checks/column/numeric/sample-variance-in-range.md @@ -530,6 +530,28 @@ spec: VARP(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + VAR_SAMP(analyzed_table."target_column") AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1069,6 +1091,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + VAR_SAMP(analyzed_table."target_column") AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1632,6 +1678,28 @@ spec: VARP(analyzed_table.[target_column]) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
+
+        === "Sensor template for Teradata"
+
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+
+            ```sql
+            SELECT
+                VAR_SAMP(analyzed_table."target_column") AS actual_value
+            FROM ""."" AS analyzed_table
+            ```
     ??? example "Trino"

         === "Sensor template for Trino"
@@ -2172,6 +2240,30 @@ Expand the *Configure with data grouping* section to see additional examples for



+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+            ```sql
+            SELECT
+                VAR_SAMP(analyzed_table."target_column") AS actual_value,
+                analyzed_table."country" AS grouping_level_1,
+                analyzed_table."state" AS grouping_level_2
+            FROM ""."" AS analyzed_table
+            GROUP BY grouping_level_1, grouping_level_2
+            ORDER BY grouping_level_1, grouping_level_2
             ```
     ??? example "Trino"

@@ -2735,6 +2827,28 @@ spec:
                 VARP(analyzed_table.[target_column]) AS actual_value
             FROM [your_sql_server_database].[].[] AS analyzed_table
             ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+
+            ```sql
+            SELECT
+                VAR_SAMP(analyzed_table."target_column") AS actual_value
+            FROM ""."" AS analyzed_table
+            ```
     ??? example "Trino"

         === "Sensor template for Trino"
@@ -3275,6 +3389,30 @@ Expand the *Configure with data grouping* section to see additional examples for



+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+            ```sql
+            SELECT
+                VAR_SAMP(analyzed_table."target_column") AS actual_value,
+                analyzed_table."country" AS grouping_level_1,
+                analyzed_table."state" AS grouping_level_2
+            FROM ""."" AS analyzed_table
+            GROUP BY grouping_level_1, grouping_level_2
+            ORDER BY grouping_level_1, grouping_level_2
             ```
     ??? example "Trino"

@@ -3923,6 +4061,32 @@ spec:
             ORDER BY CAST(analyzed_table.[date_column] AS date)


+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+
+            ```sql
+            SELECT
+                VAR_SAMP(analyzed_table."target_column") AS actual_value,
+                CAST(analyzed_table."date_column" AS DATE) AS time_period,
+                CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc
+            FROM ""."" AS analyzed_table
+            GROUP BY time_period, time_period_utc
+            ORDER BY time_period, time_period_utc
             ```
     ??? example "Trino"

@@ -4520,6 +4684,32 @@ Expand the *Configure with data grouping* section to see additional examples for
             ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date)


+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+            ```sql
+            SELECT
+                VAR_SAMP(analyzed_table."target_column") AS actual_value,
+                analyzed_table."country" AS grouping_level_1,
+                analyzed_table."state" AS grouping_level_2,
+                CAST(analyzed_table."date_column" AS DATE) AS time_period,
+                CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc
+            FROM ""."" AS analyzed_table
+            GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
+            ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
             ```
     ??? example "Trino"

@@ -5172,6 +5362,32 @@ spec:
             ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)


+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+
+            ```sql
+            SELECT
+                VAR_SAMP(analyzed_table."target_column") AS actual_value,
+                TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period,
+                CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc
+            FROM ""."" AS analyzed_table
+            GROUP BY time_period, time_period_utc
+            ORDER BY time_period, time_period_utc
             ```
     ??? example "Trino"

@@ -5769,6 +5985,32 @@ Expand the *Configure with data grouping* section to see additional examples for
             ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)


+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+            ```sql
+            SELECT
+                VAR_SAMP(analyzed_table."target_column") AS actual_value,
+                analyzed_table."country" AS grouping_level_1,
+                analyzed_table."state" AS grouping_level_2,
+                TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period,
+                CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc
+            FROM ""."" AS analyzed_table
+            GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
+            ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
             ```
     ??? example "Trino"
diff --git a/docs/checks/column/numeric/sum-in-range.md b/docs/checks/column/numeric/sum-in-range.md
index 68458023a0..605f341ac3 100644
--- a/docs/checks/column/numeric/sum-in-range.md
+++ b/docs/checks/column/numeric/sum-in-range.md
@@ -529,6 +529,28 @@ spec:
                 SUM(analyzed_table.[target_column]) AS actual_value
             FROM [your_sql_server_database].[].[] AS analyzed_table
             ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+
+            ```sql
+            SELECT
+                SUM(analyzed_table."target_column") AS actual_value
+            FROM ""."" AS analyzed_table
+            ```
     ??? example "Trino"

         === "Sensor template for Trino"
@@ -1065,6 +1087,30 @@ Expand the *Configure with data grouping* section to see additional examples for



+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+            ```sql
+            SELECT
+                SUM(analyzed_table."target_column") AS actual_value,
+                analyzed_table."country" AS grouping_level_1,
+                analyzed_table."state" AS grouping_level_2
+            FROM ""."" AS analyzed_table
+            GROUP BY grouping_level_1, grouping_level_2
+            ORDER BY grouping_level_1, grouping_level_2
             ```
     ??? example "Trino"

@@ -1627,6 +1673,28 @@ spec:
                 SUM(analyzed_table.[target_column]) AS actual_value
             FROM [your_sql_server_database].[].[] AS analyzed_table
             ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+
+            ```sql
+            SELECT
+                SUM(analyzed_table."target_column") AS actual_value
+            FROM ""."" AS analyzed_table
+            ```
     ??? example "Trino"

         === "Sensor template for Trino"
@@ -2164,6 +2232,30 @@ Expand the *Configure with data grouping* section to see additional examples for



+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+            ```sql
+            SELECT
+                SUM(analyzed_table."target_column") AS actual_value,
+                analyzed_table."country" AS grouping_level_1,
+                analyzed_table."state" AS grouping_level_2
+            FROM ""."" AS analyzed_table
+            GROUP BY grouping_level_1, grouping_level_2
+            ORDER BY grouping_level_1, grouping_level_2
             ```
     ??? example "Trino"

@@ -2726,6 +2818,28 @@ spec:
                 SUM(analyzed_table.[target_column]) AS actual_value
             FROM [your_sql_server_database].[].[] AS analyzed_table
             ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+
+            ```sql
+            SELECT
+                SUM(analyzed_table."target_column") AS actual_value
+            FROM ""."" AS analyzed_table
+            ```
     ??? example "Trino"

         === "Sensor template for Trino"
@@ -3263,6 +3377,30 @@ Expand the *Configure with data grouping* section to see additional examples for



+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+            ```sql
+            SELECT
+                SUM(analyzed_table."target_column") AS actual_value,
+                analyzed_table."country" AS grouping_level_1,
+                analyzed_table."state" AS grouping_level_2
+            FROM ""."" AS analyzed_table
+            GROUP BY grouping_level_1, grouping_level_2
+            ORDER BY grouping_level_1, grouping_level_2
             ```
     ??? example "Trino"

@@ -3910,6 +4048,32 @@ spec:
             ORDER BY CAST(analyzed_table.[date_column] AS date)


+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+
+            ```sql
+            SELECT
+                SUM(analyzed_table."target_column") AS actual_value,
+                CAST(analyzed_table."date_column" AS DATE) AS time_period,
+                CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc
+            FROM ""."" AS analyzed_table
+            GROUP BY time_period, time_period_utc
+            ORDER BY time_period, time_period_utc
             ```
     ??? example "Trino"

@@ -4504,6 +4668,32 @@ Expand the *Configure with data grouping* section to see additional examples for
             ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date)


+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+            ```sql
+            SELECT
+                SUM(analyzed_table."target_column") AS actual_value,
+                analyzed_table."country" AS grouping_level_1,
+                analyzed_table."state" AS grouping_level_2,
+                CAST(analyzed_table."date_column" AS DATE) AS time_period,
+                CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc
+            FROM ""."" AS analyzed_table
+            GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
+            ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
             ```
     ??? example "Trino"

@@ -5155,6 +5345,32 @@ spec:
             ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)


+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+
+            ```sql
+            SELECT
+                SUM(analyzed_table."target_column") AS actual_value,
+                TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period,
+                CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc
+            FROM ""."" AS analyzed_table
+            GROUP BY time_period, time_period_utc
+            ORDER BY time_period, time_period_utc
             ```
     ??? example "Trino"

@@ -5749,6 +5965,32 @@ Expand the *Configure with data grouping* section to see additional examples for
             ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1)


+            ```
+    ??? example "Teradata"
+
+        === "Sensor template for Teradata"
+            ```sql+jinja
+            {% import '/dialects/teradata.sql.jinja2' as lib with context -%}
+            SELECT
+                SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value
+                {{- lib.render_data_grouping_projections('analyzed_table') }}
+                {{- lib.render_time_dimension_projection('analyzed_table') }}
+            FROM {{ lib.render_target_table() }} AS analyzed_table
+            {{- lib.render_where_clause() -}}
+            {{- lib.render_group_by() -}}
+            {{- lib.render_order_by() -}}
+            ```
+        === "Rendered SQL for Teradata"
+            ```sql
+            SELECT
+                SUM(analyzed_table."target_column") AS actual_value,
+                analyzed_table."country" AS grouping_level_1,
+                analyzed_table."state" AS grouping_level_2,
+                TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period,
+                CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc
+            FROM ""."" AS analyzed_table
+            GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc
+            ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc
             ```
     ??? example "Trino"
diff --git a/docs/checks/column/numeric/valid-latitude-percent.md b/docs/checks/column/numeric/valid-latitude-percent.md
index 6a6feaf751..3275d0c6d1 100644
--- a/docs/checks/column/numeric/valid-latitude-percent.md
+++ b/docs/checks/column/numeric/valid-latitude-percent.md
@@ -785,6 +785,44 @@ spec:
             END AS actual_value
         FROM [your_sql_server_database].[].[] AS analyzed_table
             ```
+    ???
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -90.0 AND analyzed_table."target_column" <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1592,6 +1630,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -90.0 AND analyzed_table."target_column" <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2425,6 +2503,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -90.0 AND analyzed_table."target_column" <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3233,6 +3349,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -90.0 AND analyzed_table."target_column" <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4066,6 +4222,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -90.0 AND analyzed_table."target_column" <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4874,6 +5068,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -90.0 AND analyzed_table."target_column" <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5792,6 +6026,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -90.0 AND analyzed_table."target_column" <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6657,6 +6933,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -90.0 AND analyzed_table."target_column" <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7579,6 +7897,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -90.0 AND analyzed_table."target_column" <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8444,6 +8804,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -90.0 AND analyzed_table."target_column" <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" diff --git a/docs/checks/column/numeric/valid-longitude-percent.md b/docs/checks/column/numeric/valid-longitude-percent.md index 26778615e4..b25a7d79b5 100644 --- a/docs/checks/column/numeric/valid-longitude-percent.md +++ b/docs/checks/column/numeric/valid-longitude-percent.md @@ -785,6 +785,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -180.0 AND analyzed_table."target_column" <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1592,6 +1630,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -180.0 AND analyzed_table."target_column" <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2425,6 +2503,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -180.0 AND analyzed_table."target_column" <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3233,6 +3349,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -180.0 AND analyzed_table."target_column" <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4066,6 +4222,44 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -180.0 AND analyzed_table."target_column" <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4874,6 +5068,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -180.0 AND analyzed_table."target_column" <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5792,6 +6026,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -180.0 AND analyzed_table."target_column" <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6657,6 +6933,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -180.0 AND analyzed_table."target_column" <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7579,6 +7897,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -180.0 AND analyzed_table."target_column" <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8444,6 +8804,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" >= -180.0 AND analyzed_table."target_column" <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" diff --git a/docs/checks/column/patterns/invalid-email-format-found.md b/docs/checks/column/patterns/invalid-email-format-found.md index bd4ec625ec..e08c0caca6 100644 --- a/docs/checks/column/patterns/invalid-email-format-found.md +++ b/docs/checks/column/patterns/invalid-email-format-found.md @@ -736,6 +736,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1491,6 +1527,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2272,6 +2346,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3028,6 +3138,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3809,6 +3957,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4565,6 +4749,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5431,6 +5653,46 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6244,6 +6506,46 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7114,6 +7416,46 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7927,6 +8269,46 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" diff --git a/docs/checks/column/patterns/invalid-email-format-percent.md b/docs/checks/column/patterns/invalid-email-format-percent.md index adcf3e7d84..45e2a2d1ad 100644 --- a/docs/checks/column/patterns/invalid-email-format-percent.md +++ b/docs/checks/column/patterns/invalid-email-format-percent.md @@ -860,6 +860,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1749,6 +1793,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2664,6 +2754,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3554,6 +3688,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4469,6 +4649,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5359,6 +5583,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6359,6 +6629,54 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -7306,6 +7624,54 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8310,6 +8676,54 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9257,6 +9671,54 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/invalid-ip4-address-format-found.md b/docs/checks/column/patterns/invalid-ip4-address-format-found.md index b5b7408b8d..3b7cc39211 100644 --- a/docs/checks/column/patterns/invalid-ip4-address-format-found.md +++ b/docs/checks/column/patterns/invalid-ip4-address-format-found.md @@ -740,6 +740,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1499,6 +1535,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2284,6 +2358,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3044,6 +3154,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3829,6 +3977,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4589,6 +4773,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5459,6 +5681,46 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6276,6 +6538,46 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7150,6 +7452,46 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7967,6 +8309,46 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/invalid-ip6-address-format-found.md b/docs/checks/column/patterns/invalid-ip6-address-format-found.md index 1e45e41e00..f63c1a1aed 100644 --- a/docs/checks/column/patterns/invalid-ip6-address-format-found.md +++ b/docs/checks/column/patterns/invalid-ip6-address-format-found.md @@ -732,6 +732,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1483,6 +1519,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2260,6 +2334,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3012,6 +3122,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3789,6 +3937,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4541,6 +4725,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5403,6 +5625,46 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6212,6 +6474,46 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7078,6 +7380,46 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7887,6 +8229,46 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/invalid-usa-phone-format-found.md b/docs/checks/column/patterns/invalid-usa-phone-format-found.md index 0da3fb3cc4..c2fdd270a7 100644 --- a/docs/checks/column/patterns/invalid-usa-phone-format-found.md +++ b/docs/checks/column/patterns/invalid-usa-phone-format-found.md @@ -778,6 +778,46 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1579,6 +1619,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2406,6 +2488,46 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3208,6 +3330,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4035,6 +4199,46 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4837,6 +5041,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5749,6 +5995,50 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6608,6 +6898,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7524,6 +7858,50 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8383,6 +8761,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" diff --git a/docs/checks/column/patterns/invalid-usa-phone-format-percent.md b/docs/checks/column/patterns/invalid-usa-phone-format-percent.md index 146d7fbc2d..aba3c4cd0f 100644 --- a/docs/checks/column/patterns/invalid-usa-phone-format-percent.md +++ b/docs/checks/column/patterns/invalid-usa-phone-format-percent.md @@ -872,6 +872,52 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1773,6 +1819,54 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2700,6 +2794,52 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3602,6 +3742,54 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4529,6 +4717,52 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5431,6 +5665,54 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6443,6 +6725,56 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -7402,6 +7734,56 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8418,6 +8800,56 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9377,6 +9809,56 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/invalid-usa-zipcode-format-found.md b/docs/checks/column/patterns/invalid-usa-zipcode-format-found.md index e9325b62e5..56cd74665a 100644 --- a/docs/checks/column/patterns/invalid-usa-zipcode-format-found.md +++ b/docs/checks/column/patterns/invalid-usa-zipcode-format-found.md @@ -760,6 +760,46 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1543,6 +1583,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2352,6 +2434,46 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3136,6 +3258,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3945,6 +4109,46 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4729,6 +4933,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5623,6 +5869,50 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6464,6 +6754,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7362,6 +7696,50 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8203,6 +8581,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/invalid-usa-zipcode-format-percent.md b/docs/checks/column/patterns/invalid-usa-zipcode-format-percent.md index 5ad2ef594c..0cb1acaf0a 100644 --- a/docs/checks/column/patterns/invalid-usa-zipcode-format-percent.md +++ b/docs/checks/column/patterns/invalid-usa-zipcode-format-percent.md @@ -858,6 +858,52 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1747,6 +1793,54 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2662,6 +2756,52 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3552,6 +3692,54 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4467,6 +4655,52 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5357,6 +5591,54 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6357,6 +6639,56 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7304,6 +7636,56 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8308,6 +8690,56 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -9255,6 +9687,56 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/invalid-uuid-format-found.md b/docs/checks/column/patterns/invalid-uuid-format-found.md index 6b1d602fdc..a102476ce2 100644 --- a/docs/checks/column/patterns/invalid-uuid-format-found.md +++ b/docs/checks/column/patterns/invalid-uuid-format-found.md @@ -748,6 +748,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1515,6 +1551,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2308,6 +2382,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3076,6 +3186,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3869,6 +4017,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4637,6 +4821,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5515,6 +5737,46 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6340,6 +6602,46 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7222,6 +7524,46 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8047,6 +8389,46 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/invalid-uuid-format-percent.md b/docs/checks/column/patterns/invalid-uuid-format-percent.md index ac209bd656..821e92ef8e 100644 --- a/docs/checks/column/patterns/invalid-uuid-format-percent.md +++ b/docs/checks/column/patterns/invalid-uuid-format-percent.md @@ -840,6 +840,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1705,6 +1747,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2596,6 +2682,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3462,6 +3590,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4353,6 +4525,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5219,6 +5433,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6195,6 +6453,52 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7118,6 +7422,52 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8098,6 +8448,52 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9021,6 +9417,52 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/text-not-matching-date-pattern-found.md b/docs/checks/column/patterns/text-not-matching-date-pattern-found.md index fc8d1347d4..605fc8e2dd 100644 --- a/docs/checks/column/patterns/text-not-matching-date-pattern-found.md +++ b/docs/checks/column/patterns/text-not-matching-date-pattern-found.md @@ -856,6 +856,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1740,6 +1783,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2650,6 +2738,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3535,6 +3666,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4445,6 +4621,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5330,6 +5549,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6325,6 +6589,53 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7267,6 +7578,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8266,6 +8624,53 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9208,6 +9613,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/text-not-matching-date-pattern-percent.md b/docs/checks/column/patterns/text-not-matching-date-pattern-percent.md index f96c981b6e..7c4f187ce9 100644 --- a/docs/checks/column/patterns/text-not-matching-date-pattern-percent.md +++ b/docs/checks/column/patterns/text-not-matching-date-pattern-percent.md @@ -856,6 +856,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1740,6 +1783,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2650,6 +2738,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3535,6 +3666,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4445,6 +4621,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5330,6 +5549,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6325,6 +6589,53 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7267,6 +7578,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8266,6 +8624,53 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -9208,6 +9613,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND REGEXP_SUBSTR(analyzed_table."target_column", '^(\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/text-not-matching-name-pattern-percent.md b/docs/checks/column/patterns/text-not-matching-name-pattern-percent.md index b20488c511..48201f84cf 100644 --- a/docs/checks/column/patterns/text-not-matching-name-pattern-percent.md +++ b/docs/checks/column/patterns/text-not-matching-name-pattern-percent.md @@ -832,6 +832,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1689,6 +1731,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2572,6 +2658,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3430,6 +3558,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4313,6 +4485,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5171,6 +5385,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6139,6 +6397,52 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7054,6 +7358,52 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8026,6 +8376,52 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8941,6 +9337,52 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/patterns/text-not-matching-regex-found.md b/docs/checks/column/patterns/text-not-matching-regex-found.md index 46ab9fb309..7208c8dad4 100644 --- a/docs/checks/column/patterns/text-not-matching-regex-found.md +++ b/docs/checks/column/patterns/text-not-matching-regex-found.md @@ -838,6 +838,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1702,6 +1745,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2592,6 +2680,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3457,6 +3588,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4347,6 +4523,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5212,6 +5431,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6187,6 +6451,53 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7109,6 +7420,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8088,6 +8446,53 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9010,6 +9415,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" diff --git a/docs/checks/column/patterns/texts-not-matching-regex-percent.md b/docs/checks/column/patterns/texts-not-matching-regex-percent.md index 1a0716d5fc..0b8bce5635 100644 --- a/docs/checks/column/patterns/texts-not-matching-regex-percent.md +++ b/docs/checks/column/patterns/texts-not-matching-regex-percent.md @@ -838,6 +838,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1702,6 +1745,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? 
example "Trino" @@ -2592,6 +2680,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3457,6 +3588,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4347,6 +4523,49 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5212,6 +5431,51 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6187,6 +6451,53 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7109,6 +7420,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" @@ -8088,6 +8446,53 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9010,6 +9415,53 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL AND + REGEXP_SUBSTR(analyzed_table."target_column", '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/pii/contains-email-percent.md b/docs/checks/column/pii/contains-email-percent.md index 1aabba9e1a..07b9035318 100644 --- a/docs/checks/column/pii/contains-email-percent.md +++ b/docs/checks/column/pii/contains-email-percent.md @@ -867,6 +867,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1764,6 +1806,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2687,6 +2773,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3585,6 +3713,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4508,6 +4680,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5406,6 +5620,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6414,6 +6672,52 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7369,6 +7673,52 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8381,6 +8731,52 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9336,6 +9732,52 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/pii/contains-ip4-percent.md b/docs/checks/column/pii/contains-ip4-percent.md index b854d04d3c..5d19fd0906 100644 --- a/docs/checks/column/pii/contains-ip4-percent.md +++ b/docs/checks/column/pii/contains-ip4-percent.md @@ -859,6 +859,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1750,6 +1792,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2667,6 +2753,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3559,6 +3687,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4476,6 +4648,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5368,6 +5582,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6370,6 +6628,52 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7319,6 +7623,52 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8325,6 +8675,52 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9274,6 +9670,52 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/pii/contains-ip6-percent.md b/docs/checks/column/pii/contains-ip6-percent.md index df76ec97d4..a509abeeb3 100644 --- a/docs/checks/column/pii/contains-ip6-percent.md +++ b/docs/checks/column/pii/contains-ip6-percent.md @@ -992,6 +992,54 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2018,6 +2066,56 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3070,6 +3168,54 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4097,6 +4243,56 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5149,6 +5345,54 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6176,6 +6420,56 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -7313,6 +7607,58 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8397,6 +8743,58 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -9538,6 +9936,58 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -10622,6 +11072,58 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/pii/contains-usa-phone-percent.md b/docs/checks/column/pii/contains-usa-phone-percent.md index 6bcc663d95..817f56cd57 100644 --- a/docs/checks/column/pii/contains-usa-phone-percent.md +++ b/docs/checks/column/pii/contains-usa-phone-percent.md @@ -899,6 +899,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1828,6 +1872,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2783,6 +2873,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3713,6 +3847,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4668,6 +4848,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5598,6 +5822,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6638,6 +6908,54 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7625,6 +7943,54 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8669,6 +9035,54 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9656,6 +10070,54 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/pii/contains-usa-zipcode-percent.md b/docs/checks/column/pii/contains-usa-zipcode-percent.md index 3641478352..cd984229bf 100644 --- a/docs/checks/column/pii/contains-usa-zipcode-percent.md +++ b/docs/checks/column/pii/contains-usa-zipcode-percent.md @@ -881,6 +881,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1794,6 +1838,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2733,6 +2823,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3647,6 +3781,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4586,6 +4766,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5500,6 +5724,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6524,6 +6794,54 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7495,6 +7813,54 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? 
example "Trino" @@ -8523,6 +8889,54 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9494,6 +9908,54 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST(analyzed_table."target_column" AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/text/max-word-count.md b/docs/checks/column/text/max-word-count.md index ce3680f620..e30494862c 100644 --- a/docs/checks/column/text/max-word-count.md +++ b/docs/checks/column/text/max-word-count.md @@ -597,6 +597,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1206,6 +1232,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1841,6 +1895,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2451,6 +2531,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3086,6 +3194,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3696,6 +3830,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4416,6 +4578,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5083,6 +5275,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5807,6 +6029,36 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6474,6 +6726,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/text/min-word-count.md b/docs/checks/column/text/min-word-count.md index 35e15962f6..27f38639e0 100644 --- a/docs/checks/column/text/min-word-count.md +++ b/docs/checks/column/text/min-word-count.md @@ -597,6 +597,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1206,6 +1232,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1841,6 +1895,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2451,6 +2531,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3086,6 +3194,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3696,6 +3830,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4416,6 +4578,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5083,6 +5275,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5807,6 +6029,36 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6474,6 +6726,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN( + LENGTH( TRIM(analyzed_table."target_column") ) - LENGTH( OREPLACE(TRIM(analyzed_table."target_column"), ' ', '') ) + CASE WHEN LENGTH( TRIM(analyzed_table."target_column") ) > 0 THEN 1 ELSE 0 END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/text/text-length-above-max-length-percent.md b/docs/checks/column/text/text-length-above-max-length-percent.md index 8e38a749de..84ce60f2c6 100644 --- a/docs/checks/column/text/text-length-above-max-length-percent.md +++ b/docs/checks/column/text/text-length-above-max-length-percent.md @@ -829,6 +829,47 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + )/ COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1684,6 +1725,49 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + )/ COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2565,6 +2649,47 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + )/ COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3421,6 +3546,49 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + )/ COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4302,6 +4470,47 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + )/ COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5158,6 +5367,49 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + )/ COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6124,6 +6376,51 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + )/ COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7037,6 +7334,51 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + )/ COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8007,6 +8349,51 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + )/ COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8920,6 +9307,51 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + )/ COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/text/text-length-above-max-length.md b/docs/checks/column/text/text-length-above-max-length.md index 29a6deed38..76a0eacd36 100644 --- a/docs/checks/column/text/text-length-above-max-length.md +++ b/docs/checks/column/text/text-length-above-max-length.md @@ -735,6 +735,40 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1490,6 +1524,42 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2271,6 +2341,40 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3027,6 +3131,42 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3808,6 +3948,40 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4564,6 +4738,42 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5430,6 +5640,44 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6243,6 +6491,44 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7113,6 +7399,44 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7926,6 +8250,44 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) > 100 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/text/text-length-below-min-length-percent.md b/docs/checks/column/text/text-length-below-min-length-percent.md index cf273255e8..00b02893e1 100644 --- a/docs/checks/column/text/text-length-below-min-length-percent.md +++ b/docs/checks/column/text/text-length-below-min-length-percent.md @@ -829,6 +829,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1684,6 +1724,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2565,6 +2647,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3421,6 +3543,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4302,6 +4466,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5158,6 +5362,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6124,6 +6370,50 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7037,6 +7327,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8007,6 +8341,50 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8920,6 +9298,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/text/text-length-below-min-length.md b/docs/checks/column/text/text-length-below-min-length.md index 0a5cff035e..303868f20d 100644 --- a/docs/checks/column/text/text-length-below-min-length.md +++ b/docs/checks/column/text/text-length-below-min-length.md @@ -736,6 +736,40 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1493,6 +1527,42 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2276,6 +2346,40 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3034,6 +3138,42 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3817,6 +3957,40 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4575,6 +4749,42 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5443,6 +5653,44 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6258,6 +6506,44 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7130,6 +7416,44 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7945,6 +8269,44 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) < 5 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/text/text-length-in-range-percent.md b/docs/checks/column/text/text-length-in-range-percent.md index 4dc173c4a7..9d18283dc8 100644 --- a/docs/checks/column/text/text-length-in-range-percent.md +++ b/docs/checks/column/text/text-length-in-range-percent.md @@ -823,6 +823,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST(analyzed_table."target_column" AS VARCHAR(4096))) BETWEEN 5 AND 100 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1670,6 +1710,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST(analyzed_table."target_column" AS VARCHAR(4096))) BETWEEN 5 AND 100 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2543,6 +2625,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST(analyzed_table."target_column" AS VARCHAR(4096))) BETWEEN 5 AND 100 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3391,6 +3513,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST(analyzed_table."target_column" AS VARCHAR(4096))) BETWEEN 5 AND 100 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4264,6 +4428,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST(analyzed_table."target_column" AS VARCHAR(4096))) BETWEEN 5 AND 100 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5112,6 +5316,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST(analyzed_table."target_column" AS VARCHAR(4096))) BETWEEN 5 AND 100 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6070,6 +6316,50 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST(analyzed_table."target_column" AS VARCHAR(4096))) BETWEEN 5 AND 100 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6975,6 +7265,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST(analyzed_table."target_column" AS VARCHAR(4096))) BETWEEN 5 AND 100 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7937,6 +8271,50 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST(analyzed_table."target_column" AS VARCHAR(4096))) BETWEEN 5 AND 100 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8842,6 +9220,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST(analyzed_table."target_column" AS VARCHAR(4096))) BETWEEN 5 AND 100 THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/text/text-max-length.md b/docs/checks/column/text/text-max-length.md index d1006af199..381b6329f7 100644 --- a/docs/checks/column/text/text-max-length.md +++ b/docs/checks/column/text/text-max-length.md @@ -603,6 +603,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1218,6 +1244,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1859,6 +1913,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2475,6 +2555,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3116,6 +3224,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3732,6 +3866,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4458,6 +4620,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5131,6 +5323,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5861,6 +6083,36 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6534,6 +6786,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/text/text-mean-length.md b/docs/checks/column/text/text-mean-length.md index 8bd2c542f1..eb7b517d23 100644 --- a/docs/checks/column/text/text-mean-length.md +++ b/docs/checks/column/text/text-mean-length.md @@ -603,6 +603,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1218,6 +1244,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1859,6 +1913,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2475,6 +2555,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3116,6 +3224,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3732,6 +3866,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4458,6 +4620,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5131,6 +5323,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5861,6 +6083,36 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + AVG( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6534,6 +6786,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + AVG( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/text/text-min-length.md b/docs/checks/column/text/text-min-length.md index a1fb705a88..0db5d9d407 100644 --- a/docs/checks/column/text/text-min-length.md +++ b/docs/checks/column/text/text-min-length.md @@ -603,6 +603,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1218,6 +1244,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1859,6 +1913,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2475,6 +2555,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3116,6 +3224,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3732,6 +3866,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4458,6 +4620,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5131,6 +5323,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5861,6 +6083,36 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MIN( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6534,6 +6786,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MIN( + LENGTH(CAST(analyzed_table."target_column" AS VARCHAR(4096))) + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-count-anomaly.md b/docs/checks/column/uniqueness/distinct-count-anomaly.md index 3a1893fb32..36eb931dc3 100644 --- a/docs/checks/column/uniqueness/distinct-count-anomaly.md +++ b/docs/checks/column/uniqueness/distinct-count-anomaly.md @@ -596,6 +596,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1203,6 +1229,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1836,6 +1890,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2444,6 +2524,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3162,6 +3270,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3827,6 +3965,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-count-change-1-day.md b/docs/checks/column/uniqueness/distinct-count-change-1-day.md index e34c5a92ab..6a971c48a4 100644 --- a/docs/checks/column/uniqueness/distinct-count-change-1-day.md +++ b/docs/checks/column/uniqueness/distinct-count-change-1-day.md @@ -598,6 +598,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1208,6 +1234,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1844,6 +1898,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2455,6 +2535,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3176,6 +3284,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3844,6 +3982,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-count-change-30-days.md b/docs/checks/column/uniqueness/distinct-count-change-30-days.md index 260be7fc52..f8e2d41a9f 100644 --- a/docs/checks/column/uniqueness/distinct-count-change-30-days.md +++ b/docs/checks/column/uniqueness/distinct-count-change-30-days.md @@ -599,6 +599,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1209,6 +1235,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1845,6 +1899,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2456,6 +2536,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3177,6 +3285,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3845,6 +3983,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-count-change-7-days.md b/docs/checks/column/uniqueness/distinct-count-change-7-days.md index 6831b26dda..b4484d929a 100644 --- a/docs/checks/column/uniqueness/distinct-count-change-7-days.md +++ b/docs/checks/column/uniqueness/distinct-count-change-7-days.md @@ -599,6 +599,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1209,6 +1235,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1845,6 +1899,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2456,6 +2536,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3177,6 +3285,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3845,6 +3983,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-count-change.md b/docs/checks/column/uniqueness/distinct-count-change.md index 62d8d69bed..04f811e2a5 100644 --- a/docs/checks/column/uniqueness/distinct-count-change.md +++ b/docs/checks/column/uniqueness/distinct-count-change.md @@ -595,6 +595,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1202,6 +1228,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1835,6 +1889,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2443,6 +2523,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3076,6 +3184,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3684,6 +3818,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4402,6 +4564,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5067,6 +5259,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5789,6 +6011,36 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6454,6 +6706,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-count.md b/docs/checks/column/uniqueness/distinct-count.md index 11c22275af..a9735e5e83 100644 --- a/docs/checks/column/uniqueness/distinct-count.md +++ b/docs/checks/column/uniqueness/distinct-count.md @@ -592,6 +592,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1196,6 +1222,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1826,6 +1880,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2431,6 +2511,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3061,6 +3169,32 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3666,6 +3800,34 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4381,6 +4543,36 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5043,6 +5235,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5762,6 +5984,36 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6424,6 +6676,36 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT( + DISTINCT(analyzed_table."target_column") + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-percent-anomaly.md b/docs/checks/column/uniqueness/distinct-percent-anomaly.md index a52c4e0272..f1a414da00 100644 --- a/docs/checks/column/uniqueness/distinct-percent-anomaly.md +++ b/docs/checks/column/uniqueness/distinct-percent-anomaly.md @@ -656,6 +656,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1327,6 +1357,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2024,6 +2086,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2696,6 +2788,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3478,6 +3602,40 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4207,6 +4365,40 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-percent-change-1-day.md b/docs/checks/column/uniqueness/distinct-percent-change-1-day.md index e30ba8fc88..cef4585ab3 100644 --- a/docs/checks/column/uniqueness/distinct-percent-change-1-day.md +++ b/docs/checks/column/uniqueness/distinct-percent-change-1-day.md @@ -658,6 +658,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1332,6 +1362,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2032,6 +2094,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2707,6 +2799,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3492,6 +3616,40 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4224,6 +4382,40 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-percent-change-30-days.md b/docs/checks/column/uniqueness/distinct-percent-change-30-days.md index e6ab2488c9..85acd285ad 100644 --- a/docs/checks/column/uniqueness/distinct-percent-change-30-days.md +++ b/docs/checks/column/uniqueness/distinct-percent-change-30-days.md @@ -659,6 +659,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1333,6 +1363,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2033,6 +2095,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2708,6 +2800,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3493,6 +3617,40 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4225,6 +4383,40 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-percent-change-7-days.md b/docs/checks/column/uniqueness/distinct-percent-change-7-days.md index ed5f67b95d..75f1aba36f 100644 --- a/docs/checks/column/uniqueness/distinct-percent-change-7-days.md +++ b/docs/checks/column/uniqueness/distinct-percent-change-7-days.md @@ -659,6 +659,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1333,6 +1363,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2033,6 +2095,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2708,6 +2800,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3493,6 +3617,40 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4225,6 +4383,40 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-percent-change.md b/docs/checks/column/uniqueness/distinct-percent-change.md index ea10ac1220..6ee2169494 100644 --- a/docs/checks/column/uniqueness/distinct-percent-change.md +++ b/docs/checks/column/uniqueness/distinct-percent-change.md @@ -655,6 +655,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1326,6 +1356,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2023,6 +2085,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2695,6 +2787,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3392,6 +3516,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4064,6 +4218,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4846,6 +5032,40 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5575,6 +5795,40 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6361,6 +6615,40 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7090,6 +7378,40 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/distinct-percent.md b/docs/checks/column/uniqueness/distinct-percent.md index c320390511..0843d12aa0 100644 --- a/docs/checks/column/uniqueness/distinct-percent.md +++ b/docs/checks/column/uniqueness/distinct-percent.md @@ -653,6 +653,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1321,6 +1351,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2015,6 +2077,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2684,6 +2776,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3378,6 +3502,36 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4047,6 +4201,38 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4826,6 +5012,40 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5552,6 +5772,40 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6335,6 +6589,40 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7061,6 +7349,40 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT analyzed_table."target_column") + / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/duplicate-count.md b/docs/checks/column/uniqueness/duplicate-count.md index ba9d3e96f0..a4711e6465 100644 --- a/docs/checks/column/uniqueness/duplicate-count.md +++ b/docs/checks/column/uniqueness/duplicate-count.md @@ -564,6 +564,30 @@ spec: AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") - COUNT(DISTINCT(analyzed_table."target_column")) + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1137,6 +1161,32 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") - COUNT(DISTINCT(analyzed_table."target_column")) + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1736,6 +1786,30 @@ spec: AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") - COUNT(DISTINCT(analyzed_table."target_column")) + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2310,6 +2384,32 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") - COUNT(DISTINCT(analyzed_table."target_column")) + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2909,6 +3009,30 @@ spec: AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") - COUNT(DISTINCT(analyzed_table."target_column")) + AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3483,6 +3607,32 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") - COUNT(DISTINCT(analyzed_table."target_column")) + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4167,6 +4317,34 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") - COUNT(DISTINCT(analyzed_table."target_column")) + AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4798,6 +4976,34 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") - COUNT(DISTINCT(analyzed_table."target_column")) + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5486,6 +5692,34 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(analyzed_table."target_column") - COUNT(DISTINCT(analyzed_table."target_column")) + AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6117,6 +6351,34 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(analyzed_table."target_column") - COUNT(DISTINCT(analyzed_table."target_column")) + AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/uniqueness/duplicate-percent.md b/docs/checks/column/uniqueness/duplicate-percent.md index 8502631db1..c9948968e7 100644 --- a/docs/checks/column/uniqueness/duplicate-percent.md +++ b/docs/checks/column/uniqueness/duplicate-percent.md @@ -688,6 +688,38 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT(analyzed_table."target_column") - COUNT(DISTINCT analyzed_table."target_column") + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1393,6 +1425,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT(analyzed_table."target_column") - COUNT(DISTINCT analyzed_table."target_column") + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2124,6 +2190,38 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT(analyzed_table."target_column") - COUNT(DISTINCT analyzed_table."target_column") + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2830,6 +2928,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT(analyzed_table."target_column") - COUNT(DISTINCT analyzed_table."target_column") + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3561,6 +3693,38 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT(analyzed_table."target_column") - COUNT(DISTINCT analyzed_table."target_column") + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4267,6 +4431,40 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT(analyzed_table."target_column") - COUNT(DISTINCT analyzed_table."target_column") + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5083,6 +5281,42 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT(analyzed_table."target_column") - COUNT(DISTINCT analyzed_table."target_column") + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5846,6 +6080,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT(analyzed_table."target_column") - COUNT(DISTINCT analyzed_table."target_column") + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -6666,6 +6936,42 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT(analyzed_table."target_column") - COUNT(DISTINCT analyzed_table."target_column") + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7429,6 +7735,42 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT(analyzed_table."target_column") - COUNT(DISTINCT analyzed_table."target_column") + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/whitespace/empty-text-found.md b/docs/checks/column/whitespace/empty-text-found.md index a4378434e3..435f5e1c93 100644 --- a/docs/checks/column/whitespace/empty-text-found.md +++ b/docs/checks/column/whitespace/empty-text-found.md @@ -765,6 +765,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1551,6 +1587,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2363,6 +2437,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3150,6 +3260,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3962,6 +4110,42 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4749,6 +4933,44 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5646,6 +5868,46 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6490,6 +6752,46 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7391,6 +7693,46 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8235,6 +8577,46 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/whitespace/empty-text-percent.md b/docs/checks/column/whitespace/empty-text-percent.md index 1305d25bab..23fdf8b2f5 100644 --- a/docs/checks/column/whitespace/empty-text-percent.md +++ b/docs/checks/column/whitespace/empty-text-percent.md @@ -852,6 +852,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1732,6 +1774,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2638,6 +2724,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3519,6 +3647,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4425,6 +4597,48 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5306,6 +5520,50 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6297,6 +6555,52 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7235,6 +7539,52 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8230,6 +8580,52 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9168,6 +9564,52 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") = 0 + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/whitespace/null-placeholder-text-found.md b/docs/checks/column/whitespace/null-placeholder-text-found.md index 95665c5bce..3f9e053727 100644 --- a/docs/checks/column/whitespace/null-placeholder-text-found.md +++ b/docs/checks/column/whitespace/null-placeholder-text-found.md @@ -739,6 +739,41 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1498,6 +1533,43 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2283,6 +2355,41 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3043,6 +3150,43 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3828,6 +3972,41 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4588,6 +4767,43 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5458,6 +5674,45 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6275,6 +6530,45 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7149,6 +7443,45 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7966,6 +8299,45 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/whitespace/null-placeholder-text-percent.md b/docs/checks/column/whitespace/null-placeholder-text-percent.md index 4c81d5b81e..0ea8e185fb 100644 --- a/docs/checks/column/whitespace/null-placeholder-text-percent.md +++ b/docs/checks/column/whitespace/null-placeholder-text-percent.md @@ -830,6 +830,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1686,6 +1726,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2568,6 +2650,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3425,6 +3547,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4307,6 +4471,46 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5164,6 +5368,48 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6131,6 +6377,50 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7045,6 +7335,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8016,6 +8350,50 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -8930,6 +9308,50 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER(analyzed_table."target_column") IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/whitespace/text-surrounded-by-whitespace-found.md b/docs/checks/column/whitespace/text-surrounded-by-whitespace-found.md index b74f510098..4471bf452c 100644 --- a/docs/checks/column/whitespace/text-surrounded-by-whitespace-found.md +++ b/docs/checks/column/whitespace/text-surrounded-by-whitespace-found.md @@ -797,6 +797,44 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1616,6 +1654,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2461,6 +2539,44 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3281,6 +3397,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4126,6 +4282,44 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4946,6 +5140,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5876,6 +6110,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6753,6 +7029,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7687,6 +8005,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8564,6 +8924,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/whitespace/text-surrounded-by-whitespace-percent.md b/docs/checks/column/whitespace/text-surrounded-by-whitespace-percent.md index ce912fbd94..79bbb6b919 100644 --- a/docs/checks/column/whitespace/text-surrounded-by-whitespace-percent.md +++ b/docs/checks/column/whitespace/text-surrounded-by-whitespace-percent.md @@ -891,6 +891,51 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1811,6 +1856,53 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2757,6 +2849,51 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3678,6 +3815,53 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4624,6 +4808,51 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5545,6 +5774,53 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6576,6 +6852,55 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7554,6 +7879,55 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8589,6 +8963,55 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9567,6 +9990,55 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN (analyzed_table."target_column") IS NOT NULL + AND TRIM(analyzed_table."target_column") <> '' + AND LENGTH(analyzed_table."target_column") <> LENGTH(TRIM(analyzed_table."target_column")) + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/whitespace/whitespace-text-found.md b/docs/checks/column/whitespace/whitespace-text-found.md index 341b8b4808..b8206c172c 100644 --- a/docs/checks/column/whitespace/whitespace-text-found.md +++ b/docs/checks/column/whitespace/whitespace-text-found.md @@ -794,6 +794,44 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1612,6 +1650,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2456,6 +2534,44 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3275,6 +3391,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4119,6 +4275,44 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4938,6 +5132,46 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5867,6 +6101,48 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6743,6 +7019,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7676,6 +7994,48 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8552,6 +8912,48 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/column/whitespace/whitespace-text-percent.md b/docs/checks/column/whitespace/whitespace-text-percent.md index d14c5d1695..c33bc3766e 100644 --- a/docs/checks/column/whitespace/whitespace-text-percent.md +++ b/docs/checks/column/whitespace/whitespace-text-percent.md @@ -887,6 +887,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1804,6 +1848,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2747,6 +2837,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3665,6 +3799,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4608,6 +4788,50 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5526,6 +5750,52 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -6554,6 +6824,54 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7529,6 +7847,54 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -8561,6 +8927,54 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -9536,6 +9950,54 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(analyzed_table."target_column") = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN analyzed_table."target_column" IS NOT NULL + AND LENGTH(analyzed_table."target_column") <> 0 + AND TRIM(analyzed_table."target_column") = '' + THEN 1 + ELSE 0 + END + ) / COUNT(analyzed_table."target_column") + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/accuracy/total-row-count-match-percent.md b/docs/checks/table/accuracy/total-row-count-match-percent.md index 90bee11b56..bbd1e10b34 100644 --- a/docs/checks/table/accuracy/total-row-count-match-percent.md +++ b/docs/checks/table/accuracy/total-row-count-match-percent.md @@ -595,6 +595,41 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + COUNT(*) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + COUNT(*) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + COUNT(*) + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1218,6 +1253,41 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + COUNT(*) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + COUNT(*) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + COUNT(*) + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1841,6 +1911,41 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + COUNT(*) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + COUNT(*) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SELECT + COUNT(*) + FROM landing_zone.customer_raw AS referenced_table + ) AS expected_value, + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/table/availability/table-availability.md b/docs/checks/table/availability/table-availability.md index 7cf8787f86..3e4f94ab1b 100644 --- a/docs/checks/table/availability/table-availability.md +++ b/docs/checks/table/availability/table-availability.md @@ -718,6 +718,43 @@ spec: ) AS tab_scan ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + 0.0 AS actual_value + {{- lib.render_time_dimension_projection('tab_scan') }} + FROM + ( + SELECT + * + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + QUALIFY ROW_NUMBER() OVER (ORDER BY 1) = 1 + ) AS tab_scan + {% if lib.time_series is not none -%} + GROUP BY time_period + ORDER BY time_period + {%- endif -%} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + 0.0 AS actual_value + FROM + ( + SELECT + * + FROM ""."" AS analyzed_table + + QUALIFY ROW_NUMBER() OVER (ORDER BY 1) = 1 + ) AS tab_scan + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1465,6 +1502,43 @@ spec: ) AS tab_scan ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + 0.0 AS actual_value + {{- lib.render_time_dimension_projection('tab_scan') }} + FROM + ( + SELECT + * + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + QUALIFY ROW_NUMBER() OVER (ORDER BY 1) = 1 + ) AS tab_scan + {% if lib.time_series is not none -%} + GROUP BY time_period + ORDER BY time_period + {%- endif -%} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + 0.0 AS actual_value + FROM + ( + SELECT + * + FROM ""."" AS analyzed_table + + QUALIFY ROW_NUMBER() OVER (ORDER BY 1) = 1 + ) AS tab_scan + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2212,6 +2286,43 @@ spec: ) AS tab_scan ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + 0.0 AS actual_value + {{- lib.render_time_dimension_projection('tab_scan') }} + FROM + ( + SELECT + * + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + QUALIFY ROW_NUMBER() OVER (ORDER BY 1) = 1 + ) AS tab_scan + {% if lib.time_series is not none -%} + GROUP BY time_period + ORDER BY time_period + {%- endif -%} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + 0.0 AS actual_value + FROM + ( + SELECT + * + FROM ""."" AS analyzed_table + + QUALIFY ROW_NUMBER() OVER (ORDER BY 1) = 1 + ) AS tab_scan + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/table/comparisons/row-count-match.md b/docs/checks/table/comparisons/row-count-match.md index 90df1366ad..ed7632a3eb 100644 --- a/docs/checks/table/comparisons/row-count-match.md +++ b/docs/checks/table/comparisons/row-count-match.md @@ -536,6 +536,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1095,6 +1117,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
example "Trino" === "Sensor template for Trino" @@ -1654,6 +1698,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2298,6 +2364,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -2949,6 +3041,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/custom_sql/import-custom-result-on-table.md b/docs/checks/table/custom_sql/import-custom-result-on-table.md index 611631bde7..2b0ad7d194 100644 --- a/docs/checks/table/custom_sql/import-custom-result-on-table.md +++ b/docs/checks/table/custom_sql/import-custom-result-on-table.md @@ -402,6 +402,24 @@ spec: ``` === "Rendered SQL for SQL Server" + ```sql + SELECT + logs.my_actual_value as actual_value, + logs.my_expected_value as expected_value, + logs.error_severity as severity + FROM custom_data_quality_results as logs + WHERE logs.analyzed_schema_name = '' AND logs.analyzed_table_name = '' + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {{ parameters.sql_query | replace('{table_name}', target_table.table_name) | replace('{schema_name}', target_table.schema_name) }} + ``` + === "Rendered SQL for Teradata" + ```sql SELECT logs.my_actual_value as actual_value, @@ -821,6 +839,24 @@ spec: ``` === "Rendered SQL for SQL Server" + ```sql + SELECT + logs.my_actual_value as actual_value, + logs.my_expected_value as expected_value, + logs.error_severity as severity + FROM custom_data_quality_results as logs + WHERE logs.analyzed_schema_name = '' AND logs.analyzed_table_name = '' + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {{ parameters.sql_query | replace('{table_name}', target_table.table_name) | replace('{schema_name}', target_table.schema_name) }} + ``` + === "Rendered SQL for Teradata" + ```sql SELECT logs.my_actual_value as actual_value, @@ -1240,6 +1276,24 @@ spec: ``` === "Rendered SQL for SQL Server" + ```sql + SELECT + logs.my_actual_value as actual_value, + logs.my_expected_value as expected_value, + logs.error_severity as severity + FROM custom_data_quality_results as logs + WHERE logs.analyzed_schema_name = '' AND logs.analyzed_table_name = '' + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {{ parameters.sql_query | replace('{table_name}', target_table.table_name) | replace('{schema_name}', target_table.schema_name) }} + ``` + === "Rendered SQL for Teradata" + ```sql SELECT logs.my_actual_value as actual_value, diff --git a/docs/checks/table/custom_sql/sql-aggregate-expression-on-table.md b/docs/checks/table/custom_sql/sql-aggregate-expression-on-table.md index a491a97b13..f046b674f0 100644 --- a/docs/checks/table/custom_sql/sql-aggregate-expression-on-table.md +++ b/docs/checks/table/custom_sql/sql-aggregate-expression-on-table.md @@ -545,6 +545,29 @@ spec: (SUM(col_net_price) + SUM(col_tax)) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SUM(col_net_price) + SUM(col_tax)) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1090,6 +1113,31 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + (SUM(col_net_price) + SUM(col_tax)) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1670,6 +1718,29 @@ spec: (SUM(col_net_price) + SUM(col_tax)) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SUM(col_net_price) + SUM(col_tax)) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2216,6 +2287,31 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + (SUM(col_net_price) + SUM(col_tax)) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2796,6 +2892,29 @@ spec: (SUM(col_net_price) + SUM(col_tax)) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SUM(col_net_price) + SUM(col_tax)) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3342,6 +3461,31 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + (SUM(col_net_price) + SUM(col_tax)) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4007,6 +4151,33 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SUM(col_net_price) + SUM(col_tax)) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4610,6 +4781,33 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + (SUM(col_net_price) + SUM(col_tax)) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5279,6 +5477,33 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + (SUM(col_net_price) + SUM(col_tax)) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5882,6 +6107,33 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + (SUM(col_net_price) + SUM(col_tax)) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/custom_sql/sql-condition-failed-on-table.md b/docs/checks/table/custom_sql/sql-condition-failed-on-table.md index 213379a7c0..d43862a94c 100644 --- a/docs/checks/table/custom_sql/sql-condition-failed-on-table.md +++ b/docs/checks/table/custom_sql/sql-condition-failed-on-table.md @@ -745,6 +745,41 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN NOT (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1499,6 +1534,43 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN NOT (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2288,6 +2360,41 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN NOT (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3043,6 +3150,43 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN NOT (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3832,6 +3976,41 @@ spec: ) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN NOT (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END + ) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4587,6 +4766,43 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN NOT (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5461,6 +5677,45 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN NOT (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END + ) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6273,6 +6528,45 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN NOT (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7151,6 +7445,45 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + SUM( + CASE + WHEN NOT (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END + ) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -7963,6 +8296,45 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + SUM( + CASE + WHEN NOT (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END + ) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/custom_sql/sql-condition-passed-percent-on-table.md b/docs/checks/table/custom_sql/sql-condition-passed-percent-on-table.md index 958d2225d4..bedd9af0f2 100644 --- a/docs/checks/table/custom_sql/sql-condition-passed-percent-on-table.md +++ b/docs/checks/table/custom_sql/sql-condition-passed-percent-on-table.md @@ -803,6 +803,45 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1621,6 +1660,47 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2474,6 +2554,45 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3293,6 +3412,47 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -4146,6 +4306,45 @@ spec: END AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4965,6 +5164,47 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5903,6 +6143,49 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -6779,6 +7062,49 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -7721,6 +8047,49 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -8597,6 +8966,49 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN (SUM(col_total_impressions) > SUM(col_total_clicks)) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/timeliness/data-freshness-anomaly.md b/docs/checks/table/timeliness/data-freshness-anomaly.md index 4f7d4c0bc9..539a3ba426 100644 --- a/docs/checks/table/timeliness/data-freshness-anomaly.md +++ b/docs/checks/table/timeliness/data-freshness-anomaly.md @@ -995,6 +995,64 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) 
DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2032,6 +2090,66 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ 
lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3095,6 +3213,64 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + 
EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4133,6 +4309,66 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
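+    The Teradata freshness arithmetic above works around the fact that subtracting two
+    timestamps yields an `INTERVAL DAY(4) TO SECOND`, not a number: the template extracts the
+    day, hour, minute, and second components, folds them into seconds, and divides by 86400
+    (`24.0 * 3600.0`) to get a fractional age in days. For example, an event timestamp that
+    is 1 day 12 hours old produces `1*86400 + 12*3600 = 129600` seconds, i.e. `1.5` days.
+    A minimal sketch of the same arithmetic, assuming a hypothetical `TIMESTAMP` column
+    (so no extra `CAST` is needed):
+
+    ```sql
+    SELECT
+        (
+            EXTRACT(DAY    FROM ((CURRENT_TIMESTAMP - MAX(t."event_ts")) DAY(4) TO SECOND)) * 86400 +
+            EXTRACT(HOUR   FROM ((CURRENT_TIMESTAMP - MAX(t."event_ts")) DAY(4) TO SECOND)) * 3600 +
+            EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - MAX(t."event_ts")) DAY(4) TO SECOND)) * 60 +
+            EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - MAX(t."event_ts")) DAY(4) TO SECOND))
+        ) / 24.0 / 3600.0 AS age_in_days
+    FROM "sales"."events" AS t
+    ```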
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO 
SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" diff --git a/docs/checks/table/timeliness/data-freshness.md b/docs/checks/table/timeliness/data-freshness.md index b62a1c020c..529b3e93d9 100644 --- a/docs/checks/table/timeliness/data-freshness.md +++ b/docs/checks/table/timeliness/data-freshness.md @@ -994,6 +994,64 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ 
lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2031,6 +2089,66 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO 
SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3094,6 +3212,64 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
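+    For a `DATE` event column the template takes the whole-day branch instead of the interval
+    arithmetic. In Teradata, subtracting two `DATE` values natively returns an integer day
+    count, so the same measurement can be sketched as follows (hypothetical names; the
+    template itself is shown above):
+
+    ```sql
+    SELECT
+        CURRENT_DATE - MAX(t."order_date") AS days_since_most_recent_event
+    FROM "sales"."orders" AS t
+    ```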
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) 
DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4132,6 +4308,66 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ 
lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5195,6 +5431,64 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + 
EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6233,6 +6527,66 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
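+    In the Teradata template below, both operands are cast to `TIMESTAMP` and the
+    subtraction carries the explicit qualifier `DAY(4) TO SECOND`, since Teradata
+    requires an interval qualifier on timestamp subtraction; `DAY(4)` permits a
+    day component of up to four digits. A hedged sketch of the core expression,
+    with illustrative object names that are not part of this documentation:
+
+    ```sql
+    -- the subtraction yields an INTERVAL DAY(4) TO SECOND, whose fields
+    -- EXTRACT(DAY/HOUR/MINUTE/SECOND ...) can then pick apart
+    SELECT (CURRENT_TIMESTAMP - CAST(last_event_ts AS TIMESTAMP)) DAY(4) TO SECOND AS lag_interval
+    FROM my_events;  -- my_events and last_event_ts are placeholder names
+    ```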
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO 
SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" diff --git a/docs/checks/table/timeliness/data-ingestion-delay.md b/docs/checks/table/timeliness/data-ingestion-delay.md index 334d8b78f4..ed1329c3e2 100644 --- a/docs/checks/table/timeliness/data-ingestion-delay.md +++ b/docs/checks/table/timeliness/data-ingestion-delay.md @@ -1080,6 +1080,83 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 
'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value + FROM ""."" AS analyzed_table + ``` ??? 
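+    The Teradata ingestion-delay example above applies the same interval
+    decomposition, but to the gap between the most recent ingestion timestamp and
+    the most recent event timestamp instead of the distance from
+    `CURRENT_TIMESTAMP`. A minimal sketch of that comparison, with placeholder
+    table and column names:
+
+    ```sql
+    -- how long after the newest event the newest load happened
+    SELECT (CAST(MAX(loaded_at) AS TIMESTAMP)
+          - CAST(MAX(event_at) AS TIMESTAMP)) DAY(4) TO SECOND AS ingestion_lag
+    FROM staging_orders;  -- staging_orders, loaded_at, event_at are placeholders
+    ```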
example "Trino" === "Sensor template for Trino" @@ -2208,6 +2285,85 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ 
lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3362,6 +3518,83 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY 
FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4491,6 +4724,85 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
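+    The template below branches on the detected column types: timestamp-like
+    columns go through the interval decomposition shown above, while columns
+    detected as plain dates take the simpler `DATEDIFF` branch, which already
+    returns a whole number of days and therefore skips the seconds-to-days
+    conversion.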
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM 
((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -5645,6 +5957,83 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY 
FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6774,6 +7163,85 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM 
((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -8012,6 +8480,87 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY 
FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -9197,6 +9746,87 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM 
((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -10439,6 +11069,87 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY 
FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -11624,6 +12335,87 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM 
((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP) + - CAST(MAX(analyzed_table."col_event_timestamp") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/timeliness/data-staleness.md b/docs/checks/table/timeliness/data-staleness.md index 04303fcd85..b5b75c170b 100644 --- a/docs/checks/table/timeliness/data-staleness.md +++ b/docs/checks/table/timeliness/data-staleness.md @@ -996,6 +996,64 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_ingestion_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_ingestion_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM 
??? example "Trino" === "Sensor template for Trino" @@ -2034,6 +2092,66 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_ingestion_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_ingestion_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ```
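+
+ Note the explicit `DAY(4)` precision on the interval in the Teradata examples: a plain timestamp subtraction defaults to a two-digit day field, so any gap longer than 99 days would overflow, while `DAY(4)` keeps gaps of up to 9999 days representable (verify the exact limits against your Teradata release). A hypothetical standalone probe:
+
+ ```sql
+ -- One-year gap: needs DAY(4); the default DAY(2) interval would overflow here.
+ SELECT (CAST('2024-06-01 00:00:00' AS TIMESTAMP)
+       - CAST('2023-06-01 00:00:00' AS TIMESTAMP)) DAY(4) TO SECOND AS one_year_gap
+ ```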
??? example "Trino" === "Sensor template for Trino" @@ -3098,6 +3216,64 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_ingestion_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{
lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_ingestion_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4137,6 +4313,66 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_ingestion_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_ingestion_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP 
??? example "Trino" === "Sensor template for Trino" @@ -5201,6 +5437,64 @@ spec: ) / 24.0 / 3600.0 AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_ingestion_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_ingestion_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value + FROM ""."" AS analyzed_table + ```
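+
+ One operational caveat, independent of DQOps: Teradata evaluates `CURRENT_TIMESTAMP` in the session time zone, so if `col_inserted_at` stores UTC instants while the session runs in another zone, every staleness reading carries a constant offset. A hypothetical session-level mitigation (zone-name spelling may vary between Teradata releases):
+
+ ```sql
+ -- Pin the session time zone so CURRENT_TIMESTAMP lines up with UTC-based
+ -- ingestion timestamps before running the staleness sensor manually.
+ SET TIME ZONE 'GMT';
+ SELECT CURRENT_TIMESTAMP AS session_now
+ ```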
??? example "Trino" === "Sensor template for Trino" @@ -6240,6 +6534,66 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_ingestion_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{
lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_current_ingestion_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX(analyzed_table."col_inserted_at") AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" diff --git a/docs/checks/table/timeliness/reload-lag.md b/docs/checks/table/timeliness/reload-lag.md index 1798d97319..95a856a646 100644 --- a/docs/checks/table/timeliness/reload-lag.md +++ b/docs/checks/table/timeliness/reload-lag.md @@ -1244,6 +1244,73 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + DATEDIFF( + {{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}, + {{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} + ) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 
'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + EXTRACT(DAY FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -2510,6 +2577,73 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + DATEDIFF( + {{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}, + {{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} + ) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 
'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + EXTRACT(DAY FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -3833,6 +3967,73 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + DATEDIFF( + {{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}, + {{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} + ) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 
'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + MAX( + EXTRACT(DAY FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5099,6 +5300,73 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
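example "Worked example: monthly time periods with TRUNC"
+
+     Where the daily-partition variants derive `time_period` with a plain
+     `CAST(... AS DATE)`, the monthly variant above snaps every date to the first day
+     of its month with `TRUNC(..., 'MM')`. A small illustration with a literal date
+     (the value is an assumption, not taken from a real table):
+
+     ```sql
+     -- TRUNC(date, 'MM') returns 2024-10-01 for any day in October 2024,
+     -- so all rows of one month share a single time_period / time_period_utc pair.
+     SELECT
+         TRUNC(DATE '2024-10-27', 'MM')                    AS time_period,
+         CAST(TRUNC(DATE '2024-10-27', 'MM') AS TIMESTAMP) AS time_period_utc
+     ```
+
+ ??? 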
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + DATEDIFF( + {{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}, + {{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} + ) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 
'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + + SELECT + {{ render_ingestion_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + MAX( + EXTRACT(DAY FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(analyzed_table."col_inserted_at" AS TIMESTAMP) - CAST(analyzed_table."col_event_timestamp" AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/uniqueness/duplicate-record-count.md b/docs/checks/table/uniqueness/duplicate-record-count.md index 7d37a673f9..6450cf193f 100644 --- a/docs/checks/table/uniqueness/duplicate-record-count.md +++ b/docs/checks/table/uniqueness/duplicate-record-count.md @@ -973,6 +973,55 @@ spec: GROUP BY [id], [created_at] ) grouping_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + FROM ( + SELECT COUNT(*) AS duplicated_count + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at" + ) grouping_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2040,6 +2089,61 @@ Expand the *Configure with data grouping* section to see additional examples for grouping_table.grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value, + + grouping_table.grouping_level_1, + + grouping_table.grouping_level_2 + FROM ( + SELECT COUNT(*) AS duplicated_count, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", grouping_level_1, grouping_level_2 + ) grouping_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3077,6 +3181,55 @@ spec: GROUP BY [id], [created_at] ) grouping_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + FROM ( + SELECT COUNT(*) AS duplicated_count + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at" + ) grouping_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4145,6 +4298,61 @@ Expand the *Configure with data grouping* section to see additional examples for grouping_table.grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value, + + grouping_table.grouping_level_1, + + grouping_table.grouping_level_2 + FROM ( + SELECT COUNT(*) AS duplicated_count, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", grouping_level_1, grouping_level_2 + ) grouping_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5182,6 +5390,55 @@ spec: GROUP BY [id], [created_at] ) grouping_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + FROM ( + SELECT COUNT(*) AS duplicated_count + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at" + ) grouping_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6250,6 +6507,61 @@ Expand the *Configure with data grouping* section to see additional examples for grouping_table.grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value, + + grouping_table.grouping_level_1, + + grouping_table.grouping_level_2 + FROM ( + SELECT COUNT(*) AS duplicated_count, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", grouping_level_1, grouping_level_2 + ) grouping_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -7404,6 +7716,61 @@ spec: time_period, time_period_utc ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value, + time_period, + time_period_utc + FROM ( + SELECT COUNT(*) AS duplicated_count, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", time_period, time_period_utc + ) grouping_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -8566,6 +8933,65 @@ Expand the *Configure with data grouping* section to see additional examples for time_period, time_period_utc ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value, + + grouping_table.grouping_level_1, + + grouping_table.grouping_level_2, + time_period, + time_period_utc + FROM ( + SELECT COUNT(*) AS duplicated_count, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", grouping_level_1, grouping_level_2, time_period, time_period_utc + ) grouping_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -9726,6 +10152,61 @@ spec: time_period, time_period_utc ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value, + time_period, + time_period_utc + FROM ( + SELECT COUNT(*) AS duplicated_count, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", time_period, time_period_utc + ) grouping_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -10888,6 +11369,65 @@ Expand the *Configure with data grouping* section to see additional examples for time_period, time_period_utc ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value, + + grouping_table.grouping_level_1, + + grouping_table.grouping_level_2, + time_period, + time_period_utc + FROM ( + SELECT COUNT(*) AS duplicated_count, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", grouping_level_1, grouping_level_2, time_period, time_period_utc + ) grouping_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/table/uniqueness/duplicate-record-percent.md b/docs/checks/table/uniqueness/duplicate-record-percent.md index af313cfad9..9658acfffb 100644 --- a/docs/checks/table/uniqueness/duplicate-record-percent.md +++ b/docs/checks/table/uniqueness/duplicate-record-percent.md @@ -966,6 +966,55 @@ spec: GROUP BY [id], [created_at] ) grouping_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY "id", "created_at") AS distinct_records + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at" + ) grouping_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2022,6 +2071,61 @@ Expand the *Configure with data grouping* section to see additional examples for grouping_table.grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value, + + grouping_table.grouping_level_1, + + grouping_table.grouping_level_2 + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY "id", "created_at") AS distinct_records, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", grouping_level_1, grouping_level_2 + ) grouping_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3052,6 +3156,55 @@ spec: GROUP BY [id], [created_at] ) grouping_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY "id", "created_at") AS distinct_records + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at" + ) grouping_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -4109,6 +4262,61 @@ Expand the *Configure with data grouping* section to see additional examples for grouping_table.grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value, + + grouping_table.grouping_level_1, + + grouping_table.grouping_level_2 + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY "id", "created_at") AS distinct_records, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", grouping_level_1, grouping_level_2 + ) grouping_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -5139,6 +5347,55 @@ spec: GROUP BY [id], [created_at] ) grouping_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY "id", "created_at") AS distinct_records + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at" + ) grouping_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -6196,6 +6453,61 @@ Expand the *Configure with data grouping* section to see additional examples for grouping_table.grouping_level_2 ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value, + + grouping_table.grouping_level_1, + + grouping_table.grouping_level_2 + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY "id", "created_at") AS distinct_records, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", grouping_level_1, grouping_level_2 + ) grouping_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 + ``` ??? example "Trino" === "Sensor template for Trino" @@ -7341,6 +7653,61 @@ spec: time_period, time_period_utc ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value, + time_period, + time_period_utc + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY "id", "created_at") AS distinct_records, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", time_period, time_period_utc + ) grouping_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -8490,6 +8857,65 @@ Expand the *Configure with data grouping* section to see additional examples for time_period, time_period_utc ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value, + + grouping_table.grouping_level_1, + + grouping_table.grouping_level_2, + time_period, + time_period_utc + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY "id", "created_at") AS distinct_records, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", grouping_level_1, grouping_level_2, time_period, time_period_utc + ) grouping_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -9641,6 +10067,61 @@ spec: time_period, time_period_utc ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value, + time_period, + time_period_utc + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY "id", "created_at") AS distinct_records, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", time_period, time_period_utc + ) grouping_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" @@ -10790,6 +11271,65 @@ Expand the *Configure with data grouping* section to see additional examples for time_period, time_period_utc ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value, + + grouping_table.grouping_level_1, + + grouping_table.grouping_level_2, + time_period, + time_period_utc + FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY "id", "created_at") AS distinct_records, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + WHERE (COALESCE(CAST("id" AS VARCHAR(4096)), CAST("created_at" AS VARCHAR(4096))) IS NOT NULL) + GROUP BY "id", "created_at", grouping_level_1, grouping_level_2, time_period, time_period_utc + ) grouping_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ``` ??? example "Trino" === "Sensor template for Trino" diff --git a/docs/checks/table/volume/row-count-anomaly.md b/docs/checks/table/volume/row-count-anomaly.md index 3e443fbb8e..68e76a34c0 100644 --- a/docs/checks/table/volume/row-count-anomaly.md +++ b/docs/checks/table/volume/row-count-anomaly.md @@ -518,6 +518,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1039,6 +1061,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1586,6 +1632,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2108,6 +2176,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2740,6 +2832,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3319,6 +3437,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/volume/row-count-change-1-day.md b/docs/checks/table/volume/row-count-change-1-day.md index d827dc556e..5fa72bfc75 100644 --- a/docs/checks/table/volume/row-count-change-1-day.md +++ b/docs/checks/table/volume/row-count-change-1-day.md @@ -520,6 +520,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1044,6 +1066,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1594,6 +1640,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2119,6 +2187,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2754,6 +2846,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? 
example "Trino" @@ -3336,6 +3454,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/volume/row-count-change-30-days.md b/docs/checks/table/volume/row-count-change-30-days.md index cc2b153367..b3c918a577 100644 --- a/docs/checks/table/volume/row-count-change-30-days.md +++ b/docs/checks/table/volume/row-count-change-30-days.md @@ -521,6 +521,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1045,6 +1067,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1595,6 +1641,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2120,6 +2188,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2755,6 +2847,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3337,6 +3455,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/volume/row-count-change-7-days.md b/docs/checks/table/volume/row-count-change-7-days.md index 06163fc46b..81b08c60ee 100644 --- a/docs/checks/table/volume/row-count-change-7-days.md +++ b/docs/checks/table/volume/row-count-change-7-days.md @@ -521,6 +521,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1045,6 +1067,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1595,6 +1641,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2120,6 +2188,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2755,6 +2847,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -3337,6 +3455,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/volume/row-count-change.md b/docs/checks/table/volume/row-count-change.md index 9f5690b392..50b6074cec 100644 --- a/docs/checks/table/volume/row-count-change.md +++ b/docs/checks/table/volume/row-count-change.md @@ -517,6 +517,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1038,6 +1060,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1585,6 +1631,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2107,6 +2175,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2654,6 +2746,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3176,6 +3290,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3808,6 +3946,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4387,6 +4551,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -5023,6 +5213,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5602,6 +5818,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/checks/table/volume/row-count.md b/docs/checks/table/volume/row-count.md index 4e8a0769f5..e623015723 100644 --- a/docs/checks/table/volume/row-count.md +++ b/docs/checks/table/volume/row-count.md @@ -515,6 +515,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -1032,6 +1054,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -1575,6 +1621,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -2093,6 +2161,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -2636,6 +2728,28 @@ spec: COUNT_BIG(*) AS actual_value FROM [your_sql_server_database].[].[] AS analyzed_table ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value + FROM ""."" AS analyzed_table + ``` ??? example "Trino" === "Sensor template for Trino" @@ -3154,6 +3268,30 @@ Expand the *Configure with data grouping* section to see additional examples for + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2 + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2 + ORDER BY grouping_level_1, grouping_level_2 ``` ??? example "Trino" @@ -3782,6 +3920,32 @@ spec: ORDER BY CAST(analyzed_table.[date_column] AS date) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -4357,6 +4521,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2CAST(analyzed_table.[date_column] AS date) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + CAST(analyzed_table."date_column" AS DATE) AS time_period, + CAST(CAST(analyzed_table."date_column" AS DATE) AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" @@ -4989,6 +5179,32 @@ spec: ORDER BY DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? example "Teradata" + + === "Sensor template for Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + + ```sql + SELECT + COUNT(*) AS actual_value, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY time_period, time_period_utc + ORDER BY time_period, time_period_utc ``` ??? example "Trino" @@ -5564,6 +5780,32 @@ Expand the *Configure with data grouping* section to see additional examples for ORDER BY level_1, level_2DATEFROMPARTS(YEAR(CAST(analyzed_table.[date_column] AS date)), MONTH(CAST(analyzed_table.[date_column] AS date)), 1) + ``` + ??? 
example "Teradata" + + === "Sensor template for Teradata" + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` + === "Rendered SQL for Teradata" + ```sql + SELECT + COUNT(*) AS actual_value, + analyzed_table."country" AS grouping_level_1, + analyzed_table."state" AS grouping_level_2, + TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS time_period, + CAST(TRUNC(CAST(analyzed_table."date_column" AS DATE), 'MM') AS TIMESTAMP) AS time_period_utc + FROM ""."" AS analyzed_table + GROUP BY grouping_level_1, grouping_level_2, time_period, time_period_utc + ORDER BY grouping_level_1, grouping_level_2, time_period, time_period_utc ``` ??? example "Trino" diff --git a/docs/client/models/common.md b/docs/client/models/common.md index 0feef5f4a0..774560104a 100644 --- a/docs/client/models/common.md +++ b/docs/client/models/common.md @@ -617,7 +617,7 @@ Data source provider type (dialect type). | Data type | Enum values | |-----------|-------------| -|string|bigquery
databricks
mysql
oracle
postgresql
duckdb
presto
redshift
snowflake
spark
sqlserver
trino
hana
db2
mariadb
clickhouse
questdb
| +|string|bigquery
clickhouse
databricks
db2
duckdb
hana
mariadb
mysql
oracle
postgresql
presto
questdb
redshift
snowflake
spark
sqlserver
teradata
trino
| ___ @@ -652,6 +652,7 @@ Connection model returned by the rest api that is limited only to the basic fiel |[`mariadb`](../../reference/yaml/ConnectionYaml.md#mariadbparametersspec)|MariaDB connection parameters.|*[MariaDbParametersSpec](../../reference/yaml/ConnectionYaml.md#mariadbparametersspec)*| |[`clickhouse`](../../reference/yaml/ConnectionYaml.md#clickhouseparametersspec)|ClickHouse connection parameters.|*[ClickHouseParametersSpec](../../reference/yaml/ConnectionYaml.md#clickhouseparametersspec)*| |[`questdb`](../../reference/yaml/ConnectionYaml.md#questdbparametersspec)|QuestDB connection parameters.|*[QuestDbParametersSpec](../../reference/yaml/ConnectionYaml.md#questdbparametersspec)*| +|[`teradata`](../../reference/yaml/ConnectionYaml.md#teradataparametersspec)|Teradata connection parameters.|*[TeradataParametersSpec](../../reference/yaml/ConnectionYaml.md#teradataparametersspec)*| |[`run_checks_job_template`](./common.md#checksearchfilters)|Configured parameters for the "check run" job that should be pushed to the job queue in order to run all checks within this connection.|*[CheckSearchFilters](./common.md#checksearchfilters)*| |[`run_profiling_checks_job_template`](./common.md#checksearchfilters)|Configured parameters for the "check run" job that should be pushed to the job queue in order to run profiling checks within this connection.|*[CheckSearchFilters](./common.md#checksearchfilters)*| |[`run_monitoring_checks_job_template`](./common.md#checksearchfilters)|Configured parameters for the "check run" job that should be pushed to the job queue in order to run monitoring checks within this connection.|*[CheckSearchFilters](./common.md#checksearchfilters)*| diff --git a/docs/client/models/environment.md b/docs/client/models/environment.md index 5dfd2643c2..e119619257 100644 --- a/docs/client/models/environment.md +++ b/docs/client/models/environment.md @@ -72,6 +72,7 @@ The model that describes the current user and his access rights. |`can_change_own_password`|User can change his own password in DQOps Cloud, because the DQOps Cloud Pairing API Key is valid and synchronization is enabled.|*boolean*| |`can_use_data_domains`|User can use data domains. Support for data domains requires an ENTERPRISE license of DQOps.|*boolean*| |`can_synchronize_to_data_catalog`|User can synchronize data to a data catalog. 
The instance must be configured correctly and the user must have at least an EDITOR role.|*boolean*| +|`can_use_ai_anomaly_detection`|The DQOps instance is a paid version with advanced AI anomaly prediction.|*boolean*| ___ diff --git a/docs/client/operations/environment.md b/docs/client/operations/environment.md index 7f91c12120..4d2d72e9cb 100644 --- a/docs/client/operations/environment.md +++ b/docs/client/operations/environment.md @@ -255,7 +255,8 @@ http://localhost:8888/api/environment/profile "can_manage_and_view_shared_credentials" : false, "can_change_own_password" : false, "can_use_data_domains" : false, - "can_synchronize_to_data_catalog" : false + "can_synchronize_to_data_catalog" : false, + "can_use_ai_anomaly_detection" : false } ``` @@ -303,7 +304,8 @@ http://localhost:8888/api/environment/profile can_manage_and_view_shared_credentials=False, can_change_own_password=False, can_use_data_domains=False, - can_synchronize_to_data_catalog=False + can_synchronize_to_data_catalog=False, + can_use_ai_anomaly_detection=False ) ``` @@ -352,7 +354,8 @@ http://localhost:8888/api/environment/profile can_manage_and_view_shared_credentials=False, can_change_own_password=False, can_use_data_domains=False, - can_synchronize_to_data_catalog=False + can_synchronize_to_data_catalog=False, + can_use_ai_anomaly_detection=False ) ``` @@ -404,7 +407,8 @@ http://localhost:8888/api/environment/profile can_manage_and_view_shared_credentials=False, can_change_own_password=False, can_use_data_domains=False, - can_synchronize_to_data_catalog=False + can_synchronize_to_data_catalog=False, + can_use_ai_anomaly_detection=False ) ``` @@ -456,7 +460,8 @@ http://localhost:8888/api/environment/profile can_manage_and_view_shared_credentials=False, can_change_own_password=False, can_use_data_domains=False, - can_synchronize_to_data_catalog=False + can_synchronize_to_data_catalog=False, + can_use_ai_anomaly_detection=False ) ``` diff --git a/docs/command-line-interface/connection.md b/docs/command-line-interface/connection.md index f2f58cd506..2c605bf70b 100644 --- a/docs/command-line-interface/connection.md +++ b/docs/command-line-interface/connection.md @@ -150,6 +150,8 @@ $ dqo [dqo options...] connection add [-h] [-fw] [-hl] [--sqlserver-disable-encr [--sqlserver-database=] [--sqlserver-host=] [--sqlserver-password=] [--sqlserver-port=] [--sqlserver-user=] [-t=] + [--teradata-host=] [--teradata-password=] + [--teradata-port=] [--teradata-user=] [--trino-catalog=] [--trino-engine=] [--trino-host=] [--trino-password=] [--trino-port=] [--trino-user=] @@ -161,6 +163,7 @@ $ dqo [dqo options...] connection add [-h] [-fw] [-hl] [--sqlserver-disable-encr [-O=]... [-P=]... [-Q=]... [-R=]... [-S=]... [-T=]... + [-TE=]... ``` @@ -241,6 +244,8 @@ dqo> connection add [-h] [-fw] [-hl] [--sqlserver-disable-encryption] [--sqlserver-database=] [--sqlserver-host=] [--sqlserver-password=] [--sqlserver-port=] [--sqlserver-user=] [-t=] + [--teradata-host=] [--teradata-password=] + [--teradata-port=] [--teradata-user=] [--trino-catalog=] [--trino-engine=] [--trino-host=] [--trino-password=] [--trino-port=] [--trino-user=] @@ -252,6 +257,7 @@ dqo> connection add [-h] [-fw] [-hl] [--sqlserver-disable-encryption] [-O=]... [-P=]... [-Q=]... [-R=]... [-S=]... [-T=]... + [-TE=]... ``` @@ -348,7 +354,7 @@ All parameters supported by the command are listed below. |
`--presto-password`
|Presto database password. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || |
`--presto-port`
|Presto port number| || |
`--presto-user`
|Presto user name. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || -|
`-t`
`--provider`
|Connection provider type| |*bigquery*
*databricks*
*mysql*
*oracle*
*postgresql*
*duckdb*
*presto*
*redshift*
*snowflake*
*spark*
*sqlserver*
*trino*
*hana*
*db2*
*mariadb*
*clickhouse*
*questdb*
| +|
`-t`
`--provider`
|Connection provider type| |*bigquery*
*clickhouse*
*databricks*
*db2*
*duckdb*
*hana*
*mariadb*
*mysql*
*oracle*
*postgresql*
*presto*
*questdb*
*redshift*
*snowflake*
*spark*
*sqlserver*
*teradata*
*trino*
| |
`--questdb-database`
|QuestDB database name. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || |
`--questdb-host`
|QuestDB host name| || |
`--questdb-password`
|QuestDB database password. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || @@ -378,6 +384,10 @@ All parameters supported by the command are listed below. |
`--sqlserver-password`
|SQL Server database password. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || |
`--sqlserver-port`
|SQL Server port number| || |
`--sqlserver-user`
|SQL Server user name. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || +|
`--teradata-host`
|Teradata host name| || +|
`--teradata-password`
|Teradata database password. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || +|
`--teradata-port`
|Teradata port number| || +|
`--teradata-user`
|Teradata user name. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || |
`--trino-catalog`
|The Trino catalog that contains the databases and the tables that will be accessed with the driver. Supports also dynamic substitution with a custom environment variable.| || |
`--trino-engine`
|Trino engine type.| |*trino*
*athena*
| |
`--trino-host`
|Trino host name.| || @@ -400,6 +410,7 @@ All parameters supported by the command are listed below. |
`-R`
|Redshift additional properties that are added to the JDBC connection string| || |
`-S`
|SQL Server additional properties that are added to the JDBC connection string| || |
`-T`
|Trino additional properties that are added to the JDBC connection string| || +|
`-TE`
|Teradata additional properties that are added to the JDBC connection string.| || @@ -550,7 +561,9 @@ $ dqo [dqo options...] connection update [-h] [-fw] [-hl] [--sqlserver-disable-e [--sqlserver-authentication-mode=] [--sqlserver-database=] [--sqlserver-host=] [--sqlserver-password=] [--sqlserver-port=] - [--sqlserver-user=] [--trino-catalog=] + [--sqlserver-user=] [--teradata-host=] + [--teradata-password=] [--teradata-port=] + [--teradata-user=] [--trino-catalog=] [--trino-engine=] [--trino-host=] [--trino-password=] [--trino-port=] [--trino-user=] [-C=]... @@ -561,7 +574,7 @@ $ dqo [dqo options...] connection update [-h] [-fw] [-hl] [--sqlserver-disable-e [-MA=]... [-O=]... [-P=]... [-Q=]... [-R=]... [-S=]... - [-T=]... + [-T=]... [-TE=]... ``` @@ -646,7 +659,9 @@ dqo> connection update [-h] [-fw] [-hl] [--sqlserver-disable-encryption] [--sqlserver-authentication-mode=] [--sqlserver-database=] [--sqlserver-host=] [--sqlserver-password=] [--sqlserver-port=] - [--sqlserver-user=] [--trino-catalog=] + [--sqlserver-user=] [--teradata-host=] + [--teradata-password=] [--teradata-port=] + [--teradata-user=] [--trino-catalog=] [--trino-engine=] [--trino-host=] [--trino-password=] [--trino-port=] [--trino-user=] [-C=]... @@ -657,7 +672,7 @@ dqo> connection update [-h] [-fw] [-hl] [--sqlserver-disable-encryption] [-MA=]... [-O=]... [-P=]... [-Q=]... [-R=]... [-S=]... - [-T=]... + [-T=]... [-TE=]... ``` @@ -783,6 +798,10 @@ All parameters supported by the command are listed below. |
`--sqlserver-password`
|SQL Server database password. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || |
`--sqlserver-port`
|SQL Server port number| || |
`--sqlserver-user`
|SQL Server user name. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || +|
`--teradata-host`
|Teradata host name| || +|
`--teradata-password`
|Teradata database password. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || +|
`--teradata-port`
|Teradata port number| || +|
`--teradata-user`
|Teradata user name. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.| || |
`--trino-catalog`
|The Trino catalog that contains the databases and the tables that will be accessed with the driver. Supports also dynamic substitution with a custom environment variable.| || |
`--trino-engine`
|Trino engine type.| |*trino*
*athena*
| |
`--trino-host`
|Trino host name.| || @@ -805,6 +824,7 @@ All parameters supported by the command are listed below. |
`-R`
|Redshift additional properties that are added to the JDBC connection string| || |
`-S`
|SQL Server additional properties that are added to the JDBC connection string| || |
`-T`
|Trino additional properties that are added to the JDBC connection string| || +|
`-TE`
|Teradata additional properties that are added to the JDBC connection string.| || diff --git a/docs/command-line-interface/sensor.md b/docs/command-line-interface/sensor.md index ce931d2b63..f76667b075 100644 --- a/docs/command-line-interface/sensor.md +++ b/docs/command-line-interface/sensor.md @@ -50,7 +50,7 @@ All parameters supported by the command are listed below. |
`--headless`
`-hl`
|Starts DQOps in a headless mode. When DQOps runs in a headless mode and the application cannot start because the DQOps Cloud API key is missing or the DQOps user home folder is not configured, DQOps will stop silently instead of asking the user to approve the setup of the DQOps user home folder structure and/or log into DQOps Cloud.| || |
`-h`
`--help`
|Show the help for the command and parameters| || |
`-of`
`--output-format`
|Output format for tabular responses| |*TABLE*
*CSV*
*JSON*
| -|
`-p`
`--provider`
|Provider type| |*bigquery*
*databricks*
*mysql*
*oracle*
*postgresql*
*duckdb*
*presto*
*redshift*
*snowflake*
*spark*
*sqlserver*
*trino*
*hana*
*db2*
*mariadb*
*clickhouse*
*questdb*
| +|
`-p`
`--provider`
|Provider type| |*bigquery*
*clickhouse*
*databricks*
*db2*
*duckdb*
*hana*
*mariadb*
*mysql*
*oracle*
*postgresql*
*presto*
*questdb*
*redshift*
*snowflake*
*spark*
*sqlserver*
*teradata*
*trino*
| |
`-s`
`--sensor`
|Sensor name| || diff --git a/docs/reference/rules/Averages.md b/docs/reference/rules/Averages.md index fda5f3f59a..47349c3976 100644 --- a/docs/reference/rules/Averages.md +++ b/docs/reference/rules/Averages.md @@ -102,6 +102,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch from datetime import datetime from typing import Sequence import scipy + import numpy as np # rule specific parameters object, contains values received from the quality check threshold configuration @@ -157,7 +158,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch if len(filtered) == 0: return RuleExecutionResult() - filtered_mean = float(scipy.mean(filtered)) + filtered_mean = float(np.mean(filtered)) upper_bound = filtered_mean * (1.0 + rule_parameters.parameters.max_percent_above / 100.0) lower_bound = filtered_mean * (1.0 - rule_parameters.parameters.max_percent_below / 100.0) @@ -268,6 +269,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch from datetime import datetime from typing import Sequence import scipy + import numpy as np # rule specific parameters object, contains values received from the quality check threshold configuration @@ -323,7 +325,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch if len(filtered) == 0: return RuleExecutionResult() - filtered_mean = float(scipy.mean(filtered)) + filtered_mean = float(np.mean(filtered)) upper_bound = filtered_mean * (1.0 + rule_parameters.parameters.max_percent_above / 100.0) lower_bound = filtered_mean * (1.0 - rule_parameters.parameters.max_percent_below / 100.0) @@ -434,6 +436,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch from datetime import datetime from typing import Sequence import scipy + import numpy as np # rule specific parameters object, contains values received from the quality check threshold configuration @@ -489,7 +492,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch if len(filtered) == 0: return RuleExecutionResult() - filtered_mean = float(scipy.mean(filtered)) + filtered_mean = float(np.mean(filtered)) upper_bound = filtered_mean * (1.0 + rule_parameters.parameters.max_percent_above / 100.0) lower_bound = filtered_mean * (1.0 - rule_parameters.parameters.max_percent_below / 100.0) @@ -598,6 +601,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch from datetime import datetime from typing import Sequence import scipy + import numpy as np # rule specific parameters object, contains values received from the quality check threshold configuration @@ -653,7 +657,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch if len(filtered) == 0: return RuleExecutionResult() - filtered_mean = float(scipy.mean(filtered)) + filtered_mean = float(np.mean(filtered)) upper_bound = filtered_mean * (1.0 + rule_parameters.parameters.max_percent_above / 100.0) lower_bound = filtered_mean * (1.0 - rule_parameters.parameters.max_percent_below / 100.0) @@ -755,6 +759,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch from datetime import datetime from typing import Sequence import scipy + import numpy as np # rule specific parameters object, contains values received from the quality check threshold configuration @@ -808,7 +813,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch if len(filtered) == 0: return 
RuleExecutionResult() - filtered_mean = float(scipy.mean(filtered)) + filtered_mean = float(np.mean(filtered)) upper_bound = filtered_mean * (1.0 + rule_parameters.parameters.max_percent_within / 100.0) lower_bound = filtered_mean * (1.0 - rule_parameters.parameters.max_percent_within / 100.0) @@ -910,6 +915,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch from datetime import datetime from typing import Sequence import scipy + import numpy as np # rule specific parameters object, contains values received from the quality check threshold configuration @@ -963,7 +969,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch if len(filtered) == 0: return RuleExecutionResult() - filtered_mean = float(scipy.mean(filtered)) + filtered_mean = float(np.mean(filtered)) upper_bound = filtered_mean * (1.0 + rule_parameters.parameters.max_percent_within / 100.0) lower_bound = filtered_mean * (1.0 - rule_parameters.parameters.max_percent_within / 100.0) @@ -1065,6 +1071,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch from datetime import datetime from typing import Sequence import scipy + import numpy as np # rule specific parameters object, contains values received from the quality check threshold configuration @@ -1118,7 +1125,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch if len(filtered) == 0: return RuleExecutionResult() - filtered_mean = float(scipy.mean(filtered)) + filtered_mean = float(np.mean(filtered)) upper_bound = filtered_mean * (1.0 + rule_parameters.parameters.max_percent_within / 100.0) lower_bound = filtered_mean * (1.0 - rule_parameters.parameters.max_percent_within / 100.0) diff --git a/docs/reference/rules/Percentile.md b/docs/reference/rules/Percentile.md index b253fbe04e..a4763c8355 100644 --- a/docs/reference/rules/Percentile.md +++ b/docs/reference/rules/Percentile.md @@ -70,8 +70,10 @@ The rule definition YAML file *percentile/anomaly_differencing_percentile_moving \ AI models is not supported in an open-source distribution of DQOps. Please\ \ contact DQOps support to upgrade your instance to a closed-source DQOps distribution." data_type: boolean + display_hint: requires_paid_version parameters: degrees_of_freedom: 5 + ai_degrees_of_freedom: 8 ``` @@ -82,6 +84,7 @@ The rule definition YAML file *percentile/anomaly_differencing_percentile_moving | Parameters name | Value | |-----------------|-------| |*degrees_of_freedom*|5| +|*ai_degrees_of_freedom*|8| @@ -94,7 +97,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch ``` { .python linenums="1" } # - # Copyright © 2023 DQOps (support@dqops.com) + # Copyright © 2024 DQOps (support@dqops.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
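
A note on the recurring one-line fix above: `scipy.mean` was only an alias for `numpy.mean` and has been removed from newer SciPy releases, which is why every `Averages.md` listing now imports NumPy and calls `np.mean` directly. A minimal, self-contained sketch of the pattern, with made-up readouts and a hypothetical 10% band standing in for the rule's configured `max_percent_above`/`max_percent_below` parameters:

```python
import numpy as np

# hypothetical sensor readouts; the real rules receive these via previous_readouts
filtered = [10.0, 12.0, 11.5, 9.8]

filtered_mean = float(np.mean(filtered))  # replacement for the removed scipy.mean
upper_bound = filtered_mean * (1.0 + 10.0 / 100.0)  # max_percent_above = 10 (example)
lower_bound = filtered_mean * (1.0 - 10.0 / 100.0)  # max_percent_below = 10 (example)
print(lower_bound, filtered_mean, upper_bound)
```
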
@@ -114,8 +117,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch import numpy as np import scipy import scipy.stats - from lib.anomalies.data_preparation import convert_historic_data_differencing - from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly + from lib.anomalies.data_preparation import convert_historic_data_differencing, average_forecast + from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly, detect_anomaly # rule specific parameters object, contains values received from the quality check threshold configuration @@ -206,8 +209,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch anomaly_data = convert_historic_data_differencing(rule_parameters.previous_readouts, lambda readout: (readout / differences_median_float - 1.0 if readout >= differences_median_float else (-1.0 / (readout / differences_median_float)) + 1.0)) - threshold_upper_multiple = detect_upper_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) + threshold_upper_multiple, threshold_lower_multiple, forecast_multiple = detect_anomaly(historic_data=anomaly_data, median=0.0, + tail=tail, parameters=rule_parameters) passed = True if threshold_upper_multiple is not None: @@ -216,16 +219,24 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch else: threshold_upper = None - threshold_lower_multiple = detect_lower_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) - if threshold_lower_multiple is not None: threshold_lower = differences_median_float * (-1.0 / (threshold_lower_multiple - 1.0)) passed = passed and threshold_lower <= actual_difference else: threshold_lower = None - expected_value = last_readout + differences_median_float + if forecast_multiple is not None: + if forecast_multiple >= 0: + forecast = (forecast_multiple + 1.0) * differences_median_float + else: + forecast = differences_median_float * (-1.0 / (forecast_multiple - 1.0)) + else: + forecast = differences_median_float + + if forecast is not None: + expected_value = last_readout + forecast + else: + expected_value = None if threshold_lower is not None: lower_bound = last_readout + threshold_lower @@ -242,8 +253,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch # using unrestricted method for both positive and negative values anomaly_data = convert_historic_data_differencing(rule_parameters.previous_readouts, lambda readout: readout) - threshold_upper_result = detect_upper_bound_anomaly(historic_data=anomaly_data, median=differences_median_float, - tail=tail, parameters=rule_parameters) + threshold_upper_result, threshold_lower_result, forecast = detect_anomaly(historic_data=anomaly_data, median=differences_median_float, + tail=tail, parameters=rule_parameters) passed = True if threshold_upper_result is not None: @@ -252,15 +263,17 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch else: threshold_upper = None - threshold_lower_result = detect_lower_bound_anomaly(historic_data=anomaly_data, median=differences_median_float, - tail=tail, parameters=rule_parameters) if threshold_lower_result is not None: threshold_lower = threshold_lower_result passed = passed and threshold_lower <= actual_difference else: threshold_lower = None - expected_value = last_readout + differences_median_float + if forecast is not None: + 
expected_value = last_readout + forecast + else: + expected_value = None + if threshold_lower is not None: lower_bound = last_readout + threshold_lower else: @@ -341,8 +354,10 @@ The rule definition YAML file *percentile/anomaly_differencing_percentile_moving \ AI models is not supported in an open-source distribution of DQOps. Please\ \ contact DQOps support to upgrade your instance to a closed-source DQOps distribution." data_type: boolean + display_hint: requires_paid_version parameters: degrees_of_freedom: 5 + ai_degrees_of_freedom: 8 ``` @@ -353,6 +368,7 @@ The rule definition YAML file *percentile/anomaly_differencing_percentile_moving | Parameters name | Value | |-----------------|-------| |*degrees_of_freedom*|5| +|*ai_degrees_of_freedom*|8| @@ -365,7 +381,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch ``` { .python linenums="1" } # - # Copyright © 2023 DQOps (support@dqops.com) + # Copyright © 2024 DQOps (support@dqops.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -385,8 +401,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch import numpy as np import scipy import scipy.stats - from lib.anomalies.data_preparation import convert_historic_data_differencing - from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly + from lib.anomalies.data_preparation import convert_historic_data_differencing, average_forecast + from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly, detect_anomaly # rule specific parameters object, contains values received from the quality check threshold configuration @@ -477,8 +493,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch anomaly_data = convert_historic_data_differencing(rule_parameters.previous_readouts, lambda readout: (readout / differences_median_float - 1.0 if readout >= differences_median_float else (-1.0 / (readout / differences_median_float)) + 1.0)) - threshold_upper_multiple = detect_upper_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) + threshold_upper_multiple, threshold_lower_multiple, forecast_multiple = detect_anomaly(historic_data=anomaly_data, median=0.0, + tail=tail, parameters=rule_parameters) passed = True if threshold_upper_multiple is not None: @@ -487,16 +503,24 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch else: threshold_upper = None - threshold_lower_multiple = detect_lower_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) - if threshold_lower_multiple is not None: threshold_lower = differences_median_float * (-1.0 / (threshold_lower_multiple - 1.0)) passed = passed and threshold_lower <= actual_difference else: threshold_lower = None - expected_value = last_readout + differences_median_float + if forecast_multiple is not None: + if forecast_multiple >= 0: + forecast = (forecast_multiple + 1.0) * differences_median_float + else: + forecast = differences_median_float * (-1.0 / (forecast_multiple - 1.0)) + else: + forecast = differences_median_float + + if forecast is not None: + expected_value = last_readout + forecast + else: + expected_value = None if threshold_lower is not None: lower_bound = last_readout + threshold_lower @@ -513,8 +537,8 @@ The file is found in the 
*[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch # using unrestricted method for both positive and negative values anomaly_data = convert_historic_data_differencing(rule_parameters.previous_readouts, lambda readout: readout) - threshold_upper_result = detect_upper_bound_anomaly(historic_data=anomaly_data, median=differences_median_float, - tail=tail, parameters=rule_parameters) + threshold_upper_result, threshold_lower_result, forecast = detect_anomaly(historic_data=anomaly_data, median=differences_median_float, + tail=tail, parameters=rule_parameters) passed = True if threshold_upper_result is not None: @@ -523,15 +547,17 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch else: threshold_upper = None - threshold_lower_result = detect_lower_bound_anomaly(historic_data=anomaly_data, median=differences_median_float, - tail=tail, parameters=rule_parameters) if threshold_lower_result is not None: threshold_lower = threshold_lower_result passed = passed and threshold_lower <= actual_difference else: threshold_lower = None - expected_value = last_readout + differences_median_float + if forecast is not None: + expected_value = last_readout + forecast + else: + expected_value = None + if threshold_lower is not None: lower_bound = last_readout + threshold_lower else: @@ -609,8 +635,10 @@ The rule definition YAML file *percentile/anomaly_partition_row_count.dqorule.ya \ AI models is not supported in an open-source distribution of DQOps. Please\ \ contact DQOps support to upgrade your instance to a closed-source DQOps distribution." data_type: boolean + display_hint: requires_paid_version parameters: degrees_of_freedom: 5 + ai_degrees_of_freedom: 8 ``` @@ -621,6 +649,7 @@ The rule definition YAML file *percentile/anomaly_partition_row_count.dqorule.ya | Parameters name | Value | |-----------------|-------| |*degrees_of_freedom*|5| +|*ai_degrees_of_freedom*|8| @@ -633,7 +662,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch ``` { .python linenums="1" } # - # Copyright © 2023 DQOps (support@dqops.com) + # Copyright © 2024 DQOps (support@dqops.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
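
The differencing rules above no longer predict "last readout plus the median difference"; they back-transform the forecast multiple returned by `detect_anomaly`, which is part of the closed-source `lib.anomalies` package, so only its call site is visible in this patch. A worked sketch of that back-transformation with made-up numbers, mirroring the branch added in the listings:

```python
# Hypothetical values; the real rule derives these from historic sensor readouts.
differences_median_float = 4.0  # median of day-to-day differences
last_readout = 100.0            # most recent sensor readout

def forecast_from_multiple(forecast_multiple):
    """Maps a symmetric forecast multiple back to the difference scale."""
    if forecast_multiple is None:
        return differences_median_float  # fall back to the median difference
    if forecast_multiple >= 0:
        return (forecast_multiple + 1.0) * differences_median_float
    return differences_median_float * (-1.0 / (forecast_multiple - 1.0))

print(last_readout + forecast_from_multiple(0.0))   # 104.0: one median difference
print(last_readout + forecast_from_multiple(1.0))   # 108.0: twice the median difference
print(last_readout + forecast_from_multiple(-1.0))  # 102.0: half the median difference
```
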
@@ -653,8 +682,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch import numpy as np import scipy import scipy.stats - from lib.anomalies.data_preparation import convert_historic_data_stationary - from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly + from lib.anomalies.data_preparation import convert_historic_data_stationary, average_forecast + from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly, detect_anomaly # rule specific parameters object, contains values received from the quality check threshold configuration @@ -739,8 +768,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch anomaly_data = convert_historic_data_stationary(rule_parameters.previous_readouts, lambda readout: (readout / filtered_median_float - 1.0 if readout >= filtered_median_float else (-1.0 / (readout / filtered_median_float)) + 1.0)) - threshold_upper_multiple = detect_upper_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) + threshold_upper_multiple, threshold_lower_multiple, forecast_multiple = detect_anomaly(historic_data=anomaly_data, median=0.0, + tail=tail, parameters=rule_parameters) passed = True if threshold_upper_multiple is not None: @@ -749,16 +778,21 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch else: threshold_upper = None - threshold_lower_multiple = detect_lower_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) - if threshold_lower_multiple is not None: threshold_lower = filtered_median_float * (-1.0 / (threshold_lower_multiple - 1.0)) passed = passed and threshold_lower <= rule_parameters.actual_value else: threshold_lower = None - expected_value = filtered_median_float + if forecast_multiple is not None: + if forecast_multiple >= 0: + forecast = (forecast_multiple + 1.0) * filtered_median_float + else: + forecast = filtered_median_float * (-1.0 / (forecast_multiple - 1.0)) + else: + forecast = filtered_median_float + + expected_value = forecast lower_bound = threshold_lower upper_bound = threshold_upper return RuleExecutionResult(passed, expected_value, lower_bound, upper_bound) @@ -829,8 +863,10 @@ The rule definition YAML file *percentile/anomaly_stationary_count_values.dqorul \ AI models is not supported in an open-source distribution of DQOps. Please\ \ contact DQOps support to upgrade your instance to a closed-source DQOps distribution." data_type: boolean + display_hint: requires_paid_version parameters: degrees_of_freedom: 5 + ai_degrees_of_freedom: 8 ``` @@ -841,6 +877,7 @@ The rule definition YAML file *percentile/anomaly_stationary_count_values.dqorul | Parameters name | Value | |-----------------|-------| |*degrees_of_freedom*|5| +|*ai_degrees_of_freedom*|8| @@ -853,7 +890,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch ``` { .python linenums="1" } # - # Copyright © 2023 DQOps (support@dqops.com) + # Copyright © 2024 DQOps (support@dqops.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
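
Before calling `detect_anomaly`, the stationary rules above rescale each readout around the median with the lambda passed to `convert_historic_data_stationary`, so deviations above and below the median carry equal weight. A short sketch of that scaling with illustrative numbers:

```python
# Illustrative median; the real rules compute it from filtered historic readouts.
filtered_median_float = 50.0

def scale(readout):
    """Symmetric scaling used by the stationary anomaly rules."""
    if readout >= filtered_median_float:
        return readout / filtered_median_float - 1.0
    return (-1.0 / (readout / filtered_median_float)) + 1.0

print(scale(100.0))  # 1.0: twice the median
print(scale(25.0))   # -1.0: half the median
print(scale(50.0))   # 0.0: exactly the median
```
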
@@ -873,8 +910,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch import numpy as np import scipy import scipy.stats - from lib.anomalies.data_preparation import convert_historic_data_stationary - from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly + from lib.anomalies.data_preparation import convert_historic_data_stationary, average_forecast + from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly, detect_anomaly # rule specific parameters object, contains values received from the quality check threshold configuration @@ -962,8 +999,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch anomaly_data = convert_historic_data_stationary(rule_parameters.previous_readouts, lambda readout: (readout / filtered_median_float - 1.0 if readout >= filtered_median_float else (-1.0 / (readout / filtered_median_float)) + 1.0)) - threshold_upper_multiple = detect_upper_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) + threshold_upper_multiple, threshold_lower_multiple, forecast_multiple = detect_anomaly(historic_data=anomaly_data, median=0.0, + tail=tail, parameters=rule_parameters) passed = True if threshold_upper_multiple is not None: @@ -972,16 +1009,21 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch else: threshold_upper = None - threshold_lower_multiple = detect_lower_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) - if threshold_lower_multiple is not None: threshold_lower = filtered_median_float * (-1.0 / (threshold_lower_multiple - 1.0)) passed = passed and threshold_lower <= rule_parameters.actual_value else: threshold_lower = None - expected_value = filtered_median_float + if forecast_multiple is not None: + if forecast_multiple >= 0: + forecast = (forecast_multiple + 1.0) * filtered_median_float + else: + forecast = filtered_median_float * (-1.0 / (forecast_multiple - 1.0)) + else: + forecast = filtered_median_float + + expected_value = forecast lower_bound = threshold_lower upper_bound = threshold_upper return RuleExecutionResult(passed, expected_value, lower_bound, upper_bound) @@ -1052,8 +1094,10 @@ The rule definition YAML file *percentile/anomaly_stationary_percent_values.dqor \ AI models is not supported in an open-source distribution of DQOps. Please\ \ contact DQOps support to upgrade your instance to a closed-source DQOps distribution." 
data_type: boolean + display_hint: requires_paid_version parameters: degrees_of_freedom: 5 + ai_degrees_of_freedom: 8 ``` @@ -1064,6 +1108,7 @@ The rule definition YAML file *percentile/anomaly_stationary_percent_values.dqor | Parameters name | Value | |-----------------|-------| |*degrees_of_freedom*|5| +|*ai_degrees_of_freedom*|8| @@ -1096,7 +1141,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch import numpy as np import scipy import scipy.stats - from lib.anomalies.data_preparation import convert_historic_data_stationary + from lib.anomalies.data_preparation import convert_historic_data_stationary, average_forecast from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly @@ -1187,31 +1232,41 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch if 100.0 in all_extracted: threshold_upper = 100.0 + forecast_upper = filtered_median_float else: anomaly_data_upper = convert_historic_data_stationary(rule_parameters.previous_readouts, lambda readout: 1.0 / (1.0 - readout / 100.0)) - threshold_upper_multiple = detect_upper_bound_anomaly(historic_data=anomaly_data_upper, + threshold_upper_multiple, forecast_upper_multiple = detect_upper_bound_anomaly(historic_data=anomaly_data_upper, median=1.0 / (1.0 - filtered_median_float / 100.0), tail=tail, parameters=rule_parameters) if threshold_upper_multiple is not None: threshold_upper = 100.0 - 100.0 * (1.0 / threshold_upper_multiple) + forecast_upper = 100.0 - 100.0 * (1.0 / forecast_upper_multiple) passed = rule_parameters.actual_value <= threshold_upper + else: + threshold_upper = None + forecast_upper = None if 0.0 in all_extracted: threshold_lower = 0.0 + forecast_lower = filtered_median_float else: anomaly_data_lower = convert_historic_data_stationary(rule_parameters.previous_readouts, lambda readout: (-1.0 / (readout / filtered_median_float))) - threshold_lower_multiple = detect_lower_bound_anomaly(historic_data=anomaly_data_lower, + threshold_lower_multiple, forecast_lower_multiple = detect_lower_bound_anomaly(historic_data=anomaly_data_lower, median=-1.0, tail=tail, parameters=rule_parameters) if threshold_lower_multiple is not None: threshold_lower = filtered_median_float * (-1.0 / threshold_lower_multiple) + forecast_lower = filtered_median_float * (-1.0 / forecast_lower_multiple) passed = passed and threshold_lower <= rule_parameters.actual_value + else: + threshold_lower = None + forecast_lower = None - expected_value = filtered_median_float + expected_value = average_forecast(forecast_upper, forecast_lower) lower_bound = threshold_lower upper_bound = threshold_upper return RuleExecutionResult(passed, expected_value, lower_bound, upper_bound) @@ -1283,8 +1338,10 @@ The rule definition YAML file *percentile/anomaly_stationary_percentile_moving_a \ AI models is not supported in an open-source distribution of DQOps. Please\ \ contact DQOps support to upgrade your instance to a closed-source DQOps distribution." 
data_type: boolean + display_hint: requires_paid_version parameters: degrees_of_freedom: 5 + ai_degrees_of_freedom: 8 ``` @@ -1295,6 +1352,7 @@ The rule definition YAML file *percentile/anomaly_stationary_percentile_moving_a | Parameters name | Value | |-----------------|-------| |*degrees_of_freedom*|5| +|*ai_degrees_of_freedom*|8| @@ -1307,7 +1365,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch ``` { .python linenums="1" } # - # Copyright © 2023 DQOps (support@dqops.com) + # Copyright © 2024 DQOps (support@dqops.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -1327,8 +1385,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch import numpy as np import scipy import scipy.stats - from lib.anomalies.data_preparation import convert_historic_data_stationary - from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly + from lib.anomalies.data_preparation import convert_historic_data_stationary, average_forecast + from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly, detect_anomaly # rule specific parameters object, contains values received from the quality check threshold configuration @@ -1415,8 +1473,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch anomaly_data = convert_historic_data_stationary(rule_parameters.previous_readouts, lambda readout: (readout / filtered_median_float - 1.0 if readout >= filtered_median_float else (-1.0 / (readout / filtered_median_float)) + 1.0)) - threshold_upper_multiple = detect_upper_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) + threshold_upper_multiple, threshold_lower_multiple, forecast_multiple = detect_anomaly(historic_data=anomaly_data, median=0.0, + tail=tail, parameters=rule_parameters) passed = True if threshold_upper_multiple is not None: @@ -1425,24 +1483,30 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch else: threshold_upper = None - threshold_lower_multiple = detect_lower_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) - if threshold_lower_multiple is not None: threshold_lower = filtered_median_float * (-1.0 / (threshold_lower_multiple - 1.0)) passed = passed and threshold_lower <= rule_parameters.actual_value else: threshold_lower = None - expected_value = filtered_median_float + if forecast_multiple is not None: + if forecast_multiple >= 0: + forecast = (forecast_multiple + 1.0) * filtered_median_float + else: + forecast = filtered_median_float * (-1.0 / (forecast_multiple - 1.0)) + else: + forecast = filtered_median_float + + expected_value = forecast lower_bound = threshold_lower upper_bound = threshold_upper return RuleExecutionResult(passed, expected_value, lower_bound, upper_bound) else: # using unrestricted method - threshold_upper_result = detect_upper_bound_anomaly(values=extracted, median=filtered_median_float, - tail=tail, parameters=rule_parameters) + anomaly_data = convert_historic_data_stationary(rule_parameters.previous_readouts, lambda readout: readout) + threshold_upper_result, threshold_lower_result, forecast = detect_anomaly(historic_data=anomaly_data, median=filtered_median_float, + tail=tail, parameters=rule_parameters) passed = True if threshold_upper_result is not None: @@ -1451,15 +1515,13 @@ The file is 
found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch else: threshold_upper = None - threshold_lower_result = detect_lower_bound_anomaly(values=extracted, median=filtered_median_float, - tail=tail, parameters=rule_parameters) if threshold_lower_result is not None: threshold_lower = threshold_lower_result passed = passed and threshold_lower <= rule_parameters.actual_value else: threshold_lower = None - expected_value = filtered_median_float + expected_value = forecast lower_bound = threshold_lower upper_bound = threshold_upper return RuleExecutionResult(passed, expected_value, lower_bound, upper_bound) @@ -1531,8 +1593,10 @@ The rule definition YAML file *percentile/anomaly_stationary_percentile_moving_a \ AI models is not supported in an open-source distribution of DQOps. Please\ \ contact DQOps support to upgrade your instance to a closed-source DQOps distribution." data_type: boolean + display_hint: requires_paid_version parameters: degrees_of_freedom: 5 + ai_degrees_of_freedom: 8 ``` @@ -1543,6 +1607,7 @@ The rule definition YAML file *percentile/anomaly_stationary_percentile_moving_a | Parameters name | Value | |-----------------|-------| |*degrees_of_freedom*|5| +|*ai_degrees_of_freedom*|8| @@ -1555,7 +1620,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch ``` { .python linenums="1" } # - # Copyright © 2023 DQOps (support@dqops.com) + # Copyright © 2024 DQOps (support@dqops.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -1575,8 +1640,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch import numpy as np import scipy import scipy.stats - from lib.anomalies.data_preparation import convert_historic_data_stationary - from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly + from lib.anomalies.data_preparation import convert_historic_data_stationary, average_forecast + from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly, detect_anomaly # rule specific parameters object, contains values received from the quality check threshold configuration @@ -1601,11 +1666,8 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch degrees_of_freedom: float + # rule execution parameters, contains the sensor value (actual_value) and the rule parameters class RuleExecutionRunParameters: - """ - Rule execution parameters, contains the sensor value (actual_value) and the rule parameters - """ - actual_value: float parameters: AnomalyStationaryPercentileMovingAverageRuleParametersSpec time_period_local_epoch: int @@ -1664,10 +1726,10 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch if all(readout > 0 for readout in extracted): # using a 0-based calculation (scale from 0) anomaly_data = convert_historic_data_stationary(rule_parameters.previous_readouts, - lambda readout: (readout / filtered_median_float - 1.0 if readout >= filtered_median_float else - (-1.0 / (readout / filtered_median_float)) + 1.0)) - threshold_upper_multiple = detect_upper_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) + lambda readout: (readout / filtered_median_float - 1.0 if readout >= filtered_median_float else + (-1.0 / (readout / filtered_median_float)) + 1.0)) + threshold_upper_multiple, threshold_lower_multiple, forecast_multiple = 
detect_anomaly(historic_data=anomaly_data, median=0.0, + tail=tail, parameters=rule_parameters) passed = True if threshold_upper_multiple is not None: @@ -1676,24 +1738,30 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch else: threshold_upper = None - threshold_lower_multiple = detect_lower_bound_anomaly(historic_data=anomaly_data, median=0.0, - tail=tail, parameters=rule_parameters) - if threshold_lower_multiple is not None: threshold_lower = filtered_median_float * (-1.0 / (threshold_lower_multiple - 1.0)) passed = passed and threshold_lower <= rule_parameters.actual_value else: threshold_lower = None - expected_value = filtered_median_float + if forecast_multiple is not None: + if forecast_multiple >= 0: + forecast = (forecast_multiple + 1.0) * filtered_median_float + else: + forecast = filtered_median_float * (-1.0 / (forecast_multiple - 1.0)) + else: + forecast = filtered_median_float + + expected_value = forecast lower_bound = threshold_lower upper_bound = threshold_upper return RuleExecutionResult(passed, expected_value, lower_bound, upper_bound) else: # using unrestricted method - threshold_upper_result = detect_upper_bound_anomaly(values=extracted, median=filtered_median_float, - tail=tail, parameters=rule_parameters) + anomaly_data = convert_historic_data_stationary(rule_parameters.previous_readouts, lambda readout: readout) + threshold_upper_result, threshold_lower_result, forecast = detect_anomaly(historic_data=anomaly_data, median=filtered_median_float, + tail=tail, parameters=rule_parameters) passed = True if threshold_upper_result is not None: @@ -1702,15 +1770,13 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch else: threshold_upper = None - threshold_lower_result = detect_lower_bound_anomaly(values=extracted, median=filtered_median_float, - tail=tail, parameters=rule_parameters) if threshold_lower_result is not None: threshold_lower = threshold_lower_result passed = passed and threshold_lower <= rule_parameters.actual_value else: threshold_lower = None - expected_value = filtered_median_float + expected_value = forecast lower_bound = threshold_lower upper_bound = threshold_upper return RuleExecutionResult(passed, expected_value, lower_bound, upper_bound) @@ -1781,8 +1847,10 @@ The rule definition YAML file *percentile/anomaly_timeliness_delay.dqorule.yaml* \ AI models is not supported in an open-source distribution of DQOps. Please\ \ contact DQOps support to upgrade your instance to a closed-source DQOps distribution." data_type: boolean + display_hint: requires_paid_version parameters: degrees_of_freedom: 5 + ai_degrees_of_freedom: 8 ``` @@ -1793,6 +1861,7 @@ The rule definition YAML file *percentile/anomaly_timeliness_delay.dqorule.yaml* | Parameters name | Value | |-----------------|-------| |*degrees_of_freedom*|5| +|*ai_degrees_of_freedom*|8| @@ -1805,7 +1874,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch ``` { .python linenums="1" } # - # Copyright © 2023 DQOps (support@dqops.com) + # Copyright © 2024 DQOps (support@dqops.com) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
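
The percent-values rule earlier in this file combines its two one-sided forecasts with `average_forecast`, which also lives in the closed-source `lib.anomalies` package and is only imported here. The sketch below is merely a plausible reading of how the call site uses it (average whichever bounds exist, ignore missing ones), not the actual implementation:

```python
from typing import Optional

def average_forecast(forecast_upper: Optional[float],
                     forecast_lower: Optional[float]) -> Optional[float]:
    """Assumed behavior: mean of the available one-sided forecasts."""
    forecasts = [f for f in (forecast_upper, forecast_lower) if f is not None]
    if not forecasts:
        return None
    return sum(forecasts) / len(forecasts)

print(average_forecast(80.0, 60.0))  # 70.0
print(average_forecast(None, 60.0))  # 60.0
```
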
@@ -1825,7 +1894,7 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch import numpy as np import scipy import scipy.stats - from lib.anomalies.data_preparation import convert_historic_data_stationary + from lib.anomalies.data_preparation import convert_historic_data_stationary, average_forecast from lib.anomalies.anomaly_detection import detect_upper_bound_anomaly, detect_lower_bound_anomaly @@ -1909,17 +1978,18 @@ The file is found in the *[$DQO_HOME](../../dqo-concepts/architecture/dqops-arch tail = rule_parameters.parameters.anomaly_percent / 100.0 anomaly_data = convert_historic_data_stationary(rule_parameters.previous_readouts, lambda readout: readout) - threshold_upper_multiple = detect_upper_bound_anomaly(historic_data=anomaly_data, median=filtered_median_float, + threshold_upper_multiple, forecast_upper_multiple = detect_upper_bound_anomaly(historic_data=anomaly_data, median=filtered_median_float, tail=tail, parameters=rule_parameters) passed = True if threshold_upper_multiple is not None: threshold_upper = threshold_upper_multiple + forecast_upper = forecast_upper_multiple passed = rule_parameters.actual_value <= threshold_upper else: threshold_upper = None - expected_value = filtered_median_float + expected_value = forecast_upper lower_bound = 0.0 # always, our target is to have a delay of 0.0 days upper_bound = threshold_upper return RuleExecutionResult(passed, expected_value, lower_bound, upper_bound) diff --git a/docs/reference/sensors/column/accepted_values-column-sensors.md b/docs/reference/sensors/column/accepted_values-column-sensors.md index 1d78bfddef..a46f0804db 100644 --- a/docs/reference/sensors/column/accepted_values-column-sensors.md +++ b/docs/reference/sensors/column/accepted_values-column-sensors.md @@ -615,6 +615,39 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values | length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {% endmacro -%} + + SELECT + {{ actual_value() }} AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1372,6 +1405,48 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{ lib.make_text_constant(i) }}, + {%- else -%} + {{ lib.make_text_constant(i) }} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_else() -%} + {%- if 
parameters.expected_values|length == 0 -%} + 0 + {%- else -%} + COUNT(DISTINCT + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN {{ lib.render_target_column('analyzed_table') }} + ELSE NULL + END + ) + {%- endif -%} + {%- endmacro -%} + + SELECT + CASE + WHEN COUNT(*) = 0 THEN NULL + ELSE {{ render_else() }} + END AS actual_value, + MAX(CAST({{ parameters.expected_values | length }} AS INT)) AS expected_value_alias + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3061,6 +3136,102 @@ The templates used to generate the SQL query for each data source supported by D {%- endfor -%} {%- endif -%} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {%- endmacro -%} + + {%- macro render_from_subquery() -%} + FROM + ( + SELECT + top_col_values.top_value as top_value, + {% if lib.time_series is not none -%} + top_col_values.time_period as time_period, + top_col_values.time_period_utc as time_period_utc, + {% endif -%} + RANK() OVER({{- render_data_grouping('top_col_values', indentation = ' ', partition_by_enabled=true) }} + ORDER BY top_col_values.total_values DESC) as top_values_rank {{- render_data_grouping('top_col_values', indentation = ' ') }} + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS top_value, + COUNT(*) AS total_values + {{- lib.render_data_grouping_projections('analyzed_table', indentation = ' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation = ' ') }} + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(extra_filter = lib.render_target_column('analyzed_table') ~ ' IS NOT NULL', indentation = ' ') }} + GROUP BY {{ render_grouping_columns() -}} top_value + ) AS top_col_values + ) AS top_values + WHERE top_values_rank <= {{ parameters.top }} + {%- endmacro -%} + + {% macro render_grouping_columns() %} + {%- if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none -%} + {{ lib.render_grouping_column_names() }} {{- ', ' -}} + {%- endif -%} + {% endmacro %} + + {%- macro render_data_grouping(table_alias_prefix = '', indentation = '', partition_by_enabled = false) -%} + + {%- if partition_by_enabled == true -%}PARTITION BY + {%- if lib.time_series is not none -%} + {{" "}}top_col_values.time_period + {%- elif lib.data_groupings is none -%} + {{" "}}NULL + {%- endif -%} + {%- endif -%} + + {%- if lib.data_groupings is not none and (lib.data_groupings | length()) > 0 -%} + {%- for attribute in lib.data_groupings -%} + {{- "" if loop.first and lib.time_series is none and partition_by_enabled else "," -}} + {%- with data_grouping_level = lib.data_groupings[attribute] -%} + {%- if data_grouping_level.source == 'tag' -%} + {{ indentation }}{{ lib.make_text_constant(data_grouping_level.tag) }} + {%- elif data_grouping_level.source == 'column_value' -%} + {{ indentation }}{{ table_alias_prefix }}.grouping_{{ attribute }} + {%- endif -%} + {%- 
endwith %} + {%- endfor -%} + {%- endif -%} + {%- endmacro -%} + + SELECT + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 %} + MAX(1 + NULL) AS actual_value, + MAX(0) AS expected_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {%- else %} + COUNT(DISTINCT + CASE + WHEN top_values.top_value IN ({{ extract_in_list(parameters.expected_values) }}) THEN top_values.top_value + ELSE NULL + END + ) AS actual_value, + MAX({{ parameters.expected_values | length }}) AS expected_value + {%- if lib.time_series is not none -%} {{- "," }} + top_values.time_period, + top_values.time_period_utc + {%- endif -%} + {{- render_data_grouping('top_values', indentation = lib.eol() ~ ' ') }} + {{ render_from_subquery() }} + {%- endif -%} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3792,6 +3963,42 @@ The templates used to generate the SQL query for each data source supported by D {%- endif -%} {% endmacro -%} + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro extract_in_list(values_list) -%} + {{ values_list|join(', ') -}} + {% endmacro %} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + {#- Two approaches can be taken here. What if COUNT(*) = 0 AND value set is empty? This solution is the most convenient. 
-#} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + SELECT {{ actual_value() }} AS actual_value {{- lib.render_data_grouping_projections('analyzed_table') }} @@ -4550,6 +4757,46 @@ The templates used to generate the SQL query for each data source supported by D {%- endif -%} {% endmacro -%} + SELECT + {{ actual_value() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {%- macro extract_in_list(values_list) -%} + {%- for i in values_list -%} + {%- if not loop.last -%} + {{lib.make_text_constant(i)}}{{", "}} + {%- else -%} + {{lib.make_text_constant(i)}} + {%- endif -%} + {%- endfor -%} + {% endmacro -%} + + {%- macro actual_value() -%} + {%- if 'expected_values' not in parameters or parameters.expected_values|length == 0 -%} + MAX(0.0) + {%- else -%} + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IN ({{ extract_in_list(parameters.expected_values) }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END + {%- endif -%} + {% endmacro -%} + SELECT {{ actual_value() }} AS actual_value {{- lib.render_data_grouping_projections('analyzed_table') }} @@ -5010,6 +5257,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('AF', 'AL', 'DZ', 'AS', 'AD', 'AO', 'AI', 'AQ', 'AG', 'AR', 'AM', 'AW', 'AU', 'AT', 'AZ', 'BS', 'BH', 'BD', 'BB', 'BY', 'BE', 'BZ', 'BJ', 'BM', 'BT', 'BO', 'BA', 'BW', 'BR', 'IO', 'VG', 'BN', 'BG', 'BF', 'BI', 'KH', 'CM', 'CA', 'CV', 'KY', 'CF', 'TD', 'CL', 'CN', 'CX', 'CC', 'CO', 'KM', 'CK', 'CR', 'HR', 'CU', 'CW', 'CY', 'CZ', 'CD', 'DK', 'DJ', 'DM', 'DO', 'TL', 'EC', 'EG', 'SV', 'GQ', 'ER', 'EE', 'ET', 'FK', 'FO', 'FJ', 'FI', 'FR', 'PF', 'GA', 'GM', 'GE', 'DE', 'GH', 'GI', 'GR', 'GL', 'GD', 'GU', 'GT', 'GG', 'GN', 'GW', 'GY', 'HT', 'HN', 'HK', 'HU', 'IS', 'IN', 'ID', 'IR', 'IQ', 'IE', 'IM', 'IL', 'IT', 'CI', 'JM', 'JP', 'JE', 'JO', 'KZ', 'KE', 'KI', 'XK', 'KW', 'KG', 'LA', 'LV', 'LB', 'LS', 'LR', 'LY', 'LI', 'LT', 'LU', 'MO', 'MK', 'MG', 'MW', 'MY', 'MV', 'ML', 'MT', 'MH', 'MR', 'MU', 'YT', 'MX', 'FM', 'MD', 'MC', 'MN', 'ME', 'MS', 'MA', 'MZ', 'MM', 'NA', 'NR', 'NP', 'NL', 'AN', 'NC', 'NZ', 'NI', 'NE', 'NG', 'NU', 'KP', 'MP', 'NO', 'OM', 'PK', 'PW', 'PS', 'PA', 'PG', 'PY', 'PE', 'PH', 'PN', 'PL', 'PT', 'PR', 'QA', 'CG', 'RE', 'RO', 'RU', 'RW', 'BL', 'SH', 'KN', 'LC', 'MF', 'PM', 'VC', 'WS', 'SM', 'ST', 'SA', 'SN', 'RS', 'SC', 'SL', 'SG', 'SX', 'SK', 'SI', 'SB', 'SO', 
'ZA', 'KR', 'SS', 'ES', 'LK', 'SD', 'SR', 'SJ', 'SZ', 'SE', 'CH', 'SY', 'TW', 'TJ', 'TZ', 'TH', 'TG', 'TK', 'TO', 'TT', 'TN', 'TR', 'TM', 'TC', 'TV', 'VI', 'UG', 'UA', 'AE', 'GB', 'US', 'UY', 'UZ', 'VU', 'VA', 'VE', 'VN', 'WF', 'EH', 'YE', 'ZM', 'ZW') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -5450,6 +5719,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN UPPER({{ lib.render_target_column('analyzed_table')}}) IN ('ALL', 'AFN', 'ARS', 'AWG', 'AUD', 'AZN', 'BSD', 'BBD', 'BYN', 'BZD', 'BMD', 'BOB', 'BAM', 'BWP', 'BGN', 'BRL', 'BND', 'KHR', 'CAD', 'KYD', 'CLP', 'CNY', 'COP', 'CRC', 'HRK', 'CUP', 'CZK', 'DKK', 'DOP', 'XCD', 'EGP', 'SVC', 'EUR', 'FKP', 'FJD', 'GHS', 'GIP', 'GTQ', 'GGP', 'GYD', 'HNL', 'HKD', 'HUF', 'ISK', 'INR', 'IDR', 'IRR', 'IMP', 'ILS', 'JMD', 'JPY', 'JEP', 'KZT', 'KPW', 'KRW', 'KGS', 'LAK', 'LBP', 'LRD', 'MKD', 'MYR', 'MUR', 'MXN', 'MNT', 'MZN', 'NAD', 'NPR', 'ANG', 'NZD', 'NIO', 'NGN', 'NOK', 'OMR', 'PKR', 'PAB', 'PYG', 'PEN', 'PHP', 'PLN', 'QAR', 'RON', 'RUB', 'SHP', 'SAR', 'RSD', 'SCR', 'SGD', 'SBD', 'SOS', 'ZAR', 'LKR', 'SEK', 'CHF', 'SRD', 'SYP', 'TWD', 'THB', 'TTD', 'TRY', 'TVD', 'UAH', 'AED', 'GBP', 'USD', 'UYU', 'UZS', 'VEF', 'VND', 'YER', 'ZWD', 'LEK', '؋', '$', 'Ƒ', '₼', 'BR', 'BZ$', '$B', 'KM', 'P', 'ЛВ', 'R$', '៛', '¥', '₡', 'KN', '₱', 'KČ', 'KR', 'RD$', '£', '€', '¢', 'Q', 'L', 'FT', '₹', 'RP', '﷼', '₪', 'J$', '₩', '₭', 'ДЕН', 'RM', '₨', '₮', 'د.إ', 'MT', 'C$', '₦', 'B/.', 'GS', 'S/.', 'ZŁ', 'LEI', 'ДИН.', 'S', 'R', 'NT$', '฿', 'TT$', '₺', '₴', '$U', 'BS', '₫', 'Z$') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/accuracy-column-sensors.md b/docs/reference/sensors/column/accuracy-column-sensors.md index edb8fd5a83..0e9c4dca17 100644 --- a/docs/reference/sensors/column/accuracy-column-sensors.md +++ b/docs/reference/sensors/column/accuracy-column-sensors.md @@ -373,6 +373,28 @@ The templates used to generate the SQL query for each data source supported by D FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + 
AVG(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` === "Trino" ```sql+jinja @@ -751,6 +773,28 @@ The templates used to generate the SQL query for each data source supported by D {%- endif -%} {%- endmacro -%} + SELECT + (SELECT + MAX(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + SELECT (SELECT MAX(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) @@ -1140,6 +1184,28 @@ The templates used to generate the SQL query for each data source supported by D {%- endif -%} {%- endmacro -%} + SELECT + (SELECT + MIN(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + SELECT (SELECT MIN(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) @@ -1538,6 +1604,28 @@ The templates used to generate the SQL query for each data source supported by D FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + COUNT(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + COUNT({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` === "Trino" ```sql+jinja @@ -1928,6 +2016,28 @@ The templates used to generate the SQL query for each data source supported by D {%- endif -%} {%- endmacro -%} + SELECT + (SELECT + SUM(referenced_table.{{ 
lib.quote_identifier(parameters.referenced_column) }}) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + SELECT (SELECT SUM(referenced_table.{{ lib.quote_identifier(parameters.referenced_column) }}) diff --git a/docs/reference/sensors/column/bool-column-sensors.md b/docs/reference/sensors/column/bool-column-sensors.md index ffb4e9801d..34f00de5bf 100644 --- a/docs/reference/sensors/column/bool-column-sensors.md +++ b/docs/reference/sensors/column/bool-column-sensors.md @@ -407,6 +407,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -838,6 +860,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} = 1 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/conversions-column-sensors.md b/docs/reference/sensors/column/conversions-column-sensors.md index 8c198b60bb..562cb3ee94 100644 --- a/docs/reference/sensors/column/conversions-column-sensors.md +++ b/docs/reference/sensors/column/conversions-column-sensors.md @@ -407,6 +407,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ 
lib.render_target_column('analyzed_table')}}) IN ('true', 'false', 't', 'f', 'y', 'n', 'yes', 'no', '1', '0') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -980,6 +1002,39 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((31(\/|-|\.)(0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec)))(\/|-|\.)|((29|30)(\/|-|\.)(0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec))(\/|-|\.)))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(29(\/|-|\.)(0?2|(Feb))(\/|-|\.)(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(0?[1-9]|1\d|2[0-8])(\/|-|\.)((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.|[ ])31)(([,]?[ ]?)|(\/|-|\.))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.|[ ])(29|30))(([,]?[ ]?)|(\/|-|\.))))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((0?2|(Feb)(\/|-|\.|[ ])29)(([,]?[ ]?)|(\/|-|\.))(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.|[ ])(0?[1-9]|1\d|2[0-8]))(([,]?[ ]?)|(\/|-|\.))((1[6-9]|[2-9]\d)?\d{2})$') IS NOT NULL + OR + REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[13578]|1[02]|(Jan|Mar|May|Jul|Aug|Oct|Dec))(\/|-|\.)(31))|((0?[1,3-9]|1[0-2]|(Jan|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)(\/|-|\.)(29|30))))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^((1[6-9]|[2-9]\d)?\d{2})(\/|-|\.)(((0?[1-9]|(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep))|(1[0-2]|(Oct|Nov|Dec)))(\/|-|\.)(0?[1-9]|1\d|2[0-8]))$') IS NOT NULL + OR REGEXP_SUBSTR(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096)), '^(((1[6-9]|[2-9]\d)?(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))(\/|-|\.)((0?2|(Feb)(\/|-|\.)(29)))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ 
lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1358,6 +1413,24 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{ lib.render_data_grouping_projections('analyzed_table') }} + {{ lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + {{ lib.render_group_by() }} + {{ lib.render_order_by() }} + ``` === "Trino" ```sql+jinja @@ -1726,6 +1799,24 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * COUNT( + TRYCAST({{ lib.render_target_column('analyzed_table') }} AS INTEGER) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/custom_sql-column-sensors.md b/docs/reference/sensors/column/custom_sql-column-sensors.md index bc45ca95ae..af42ad532f 100644 --- a/docs/reference/sensors/column/custom_sql-column-sensors.md +++ b/docs/reference/sensors/column/custom_sql-column-sensors.md @@ -166,6 +166,14 @@ The templates used to generate the SQL query for each data source supported by D | replace('{schema_name}', target_table.schema_name) | replace('{column_name}', column_name) }} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {{ parameters.sql_query | replace('{table_name}', target_table.table_name) + | replace('{schema_name}', target_table.schema_name) + | replace('{column_name}', column_name) }} + ``` === "Trino" ```sql+jinja @@ -459,6 +467,20 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -876,6 +898,27 
@@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1347,6 +1390,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT ({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND NOT ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table')}}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1774,6 +1841,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2244,6 +2332,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND ({{ parameters.sql_condition | replace('{column}', lib.render_target_column('analyzed_table')) | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + 
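+                    {# Note: parameters.sql_condition arrives here with its {column}, {table},
+                       and {alias} tokens already substituted by the replace filters above;
+                       this Jinja comment renders to nothing. #}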
ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/datatype-column-sensors.md b/docs/reference/sensors/column/datatype-column-sensors.md index 4b7db5bf73..c898fd2add 100644 --- a/docs/reference/sensors/column/datatype-column-sensors.md +++ b/docs/reference/sensors/column/datatype-column-sensors.md @@ -1588,6 +1588,94 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN NULL + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 1 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?[0-9]*[.,]?[0-9]+$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 2 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 3 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 4 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), 
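+                        {# The pattern that follows matches ISO-8601-style timestamps with an
+                           optional zone suffix; columns whose every value matches fall into
+                           detected-datatype category 5, per the THEN 5 branch below. Being a
+                           Jinja comment, this renders to nothing, so the surrounding
+                           REGEXP_SUBSTR call is unchanged. #}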
'^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 5 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 1 + ELSE 0 + END + ) + THEN 6 + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[-+]?\d+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^[+-]?([0-9]*[.])[0-9]+$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4}))$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4}))$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01]))$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[/](0[1-9]|1[0-2])[/](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[-](0[1-9]|1[0-2])[-](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((0[1-9]|[1][0-9]|[2][0-9]|3[01])[.](0[1-9]|1[0-2])[.](\d{4})[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[/](0[1-9]|1[0-2])[/](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$|^((\d{4})[.](0[1-9]|1[0-2])[.](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?(\b(am|pm|AM|PM)\b)?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^((\d{4})[-](0[1-9]|1[0-2])[-](0[1-9]|[1][0-9]|[2][0-9]|3[01])[\s]?[T]?[\s]?([0]|2[0-3]|[01][0-9])[:]([0-5][0-9])[:]([0-5][0-9])[\s]?([.]\d{0,12})?[\s]?((GMT)|(UTC))?(([-+]\d{2}[:]?(\d{2})?)|[zZ])?)$') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), '^(\b(true|false|TRUE|FALSE|yes|no|YES|NO|y|n|Y|N|t|f|T|F)\b)$') IS NOT NULL + THEN 0 + WHEN TRIM(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) <> '' + THEN 1 + ELSE 0 + END + ) + THEN 7 + ELSE 8 + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/datetime-column-sensors.md b/docs/reference/sensors/column/datetime-column-sensors.md index fc9c0908cc..27d3255ac2 100644 --- 
a/docs/reference/sensors/column/datetime-column-sensors.md +++ b/docs/reference/sensors/column/datetime-column-sensors.md @@ -431,6 +431,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- render_ordering_column_names() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_date_format_cast() }} >= {{ lib.make_text_constant(parameters.min_date) }} AND {{ lib.render_date_format_cast() }} <= {{ lib.make_text_constant(parameters.max_date) }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1031,6 +1053,38 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + {% if lib.is_instant(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% elif lib.is_local_date(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > (CURRENT_DATE + INTERVAL {{((parameters.max_future_days) * 1) | int}} DAY) + {% elif lib.is_local_date_time(table.columns[column_name].type_snapshot.column_type) == 'true' -%} + {{ lib.render_target_column('analyzed_table') }} > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% else -%} + CAST({{ lib.render_target_column('analyzed_table') }} AS TIMESTAMP) > CURRENT_TIMESTAMP + INTERVAL {{((parameters.max_future_days) * 86400) | int}} SECOND + {% endif -%} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1507,6 +1561,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), {{lib.render_date_format_regex(parameters.date_format)}}) IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS
actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/integrity-column-sensors.md b/docs/reference/sensors/column/integrity-column-sensors.md index 3e41f1bceb..a584ba85c6 100644 --- a/docs/reference/sensors/column/integrity-column-sensors.md +++ b/docs/reference/sensors/column/integrity-column-sensors.md @@ -400,6 +400,28 @@ The templates used to generate the SQL query for each data source supported by D ```sql+jinja {% import '/dialects/sqlserver.sql.jinja2' as lib with context -%} + SELECT + 100.0 * SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 0 + ELSE 1 + END + ) / COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT 100.0 * SUM( CASE @@ -866,6 +888,28 @@ The templates used to generate the SQL query for each data source supported by D ```sql+jinja {% import '/dialects/sqlserver.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} IS NULL AND {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + LEFT OUTER JOIN {{ lib.render_referenced_table(parameters.foreign_table) }} AS foreign_table + ON {{ lib.render_target_column('analyzed_table')}} = foreign_table.{{ lib.quote_identifier(parameters.foreign_column) }} + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT SUM( CASE diff --git a/docs/reference/sensors/column/nulls-column-sensors.md b/docs/reference/sensors/column/nulls-column-sensors.md index e1fe2c141b..b710a107bf 100644 --- a/docs/reference/sensors/column/nulls-column-sensors.md +++ b/docs/reference/sensors/column/nulls-column-sensors.md @@ -275,6 +275,20 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + 
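+        {# Worth noting: COUNT(column) already skips NULLs in Teradata, as in ANSI SQL, so this
+           template counts non-null values without needing a CASE expression; this Jinja comment
+           renders to nothing. #}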
{{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -602,6 +616,22 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 0.0 + ELSE 100.0 * COUNT({{ lib.render_target_column('analyzed_table') }}) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -967,6 +997,24 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1380,6 +1428,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NULL THEN 1 + ELSE 0 + END + ) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/numeric-column-sensors.md b/docs/reference/sensors/column/numeric-column-sensors.md index 95a127078f..11fb4903e9 100644 --- a/docs/reference/sensors/column/numeric-column-sensors.md +++ b/docs/reference/sensors/column/numeric-column-sensors.md @@ -401,6 +401,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + 
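+      {# A rough sketch of the rendered SQL, assuming a hypothetical numeric column "age" and
+         parameters min_value=0, max_value=120 (data grouping and time dimension projections
+         omitted for brevity):
+         SELECT
+             CASE
+                 WHEN COUNT(age) = 0 THEN 100.0
+                 ELSE 100.0 * SUM(CASE WHEN age >= 0 AND age <= 120 THEN 1 ELSE 0 END) / COUNT(age)
+             END AS actual_value
+         FROM your_database.your_table AS analyzed_table
+         This Jinja comment produces no output when the template renders. #}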
``` === "Trino" ```sql+jinja @@ -772,6 +793,24 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -90.0 OR {{ lib.render_target_column('analyzed_table') }} > 90.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1139,6 +1178,24 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} < -180.0 OR {{ lib.render_target_column('analyzed_table') }} > 180.0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1426,6 +1483,19 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1791,6 +1861,24 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2203,6 +2291,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- 
lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2575,6 +2684,24 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} >= 0 THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2988,6 +3115,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= 0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3368,6 +3516,24 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3787,6 +3953,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} > {{(parameters.max_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -4167,6 +4354,24 @@ The 
templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -4586,6 +4791,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} < {{(parameters.min_value)}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -5010,6 +5236,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= {{ parameters.min_value }} AND {{ lib.render_target_column('analyzed_table') }} <= {{ parameters.max_value }} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -5542,6 +5789,21 @@ The templates used to generate the SQL query for each data source supported by D ORDER BY {{render_time_period_columns()}} {{- lib.render_data_grouping_projections('analyzed_table', set_leading_comma=(lib.time_series is not none)) }} {%- endif -%} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + PERCENTILE_CONT({{ parameters.percentile_value }}) + WITHIN GROUP (ORDER BY {{ lib.render_target_column('analyzed_table')}} * 1.0) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -5859,6 +6121,19 @@ The templates used to generate the SQL query for each data source supported by D {{- 
lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -6141,6 +6416,19 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_POP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -6423,6 +6711,19 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + STDDEV_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -6706,6 +7007,19 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + VAR_SAMP({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -6988,6 +7302,19 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -7396,6 +7723,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ 
lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -90.0 AND {{ lib.render_target_column('analyzed_table') }} <= 90.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -7812,6 +8160,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} >= -180.0 AND {{ lib.render_target_column('analyzed_table') }} <= 180.0 THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/patterns-column-sensors.md b/docs/reference/sensors/column/patterns-column-sensors.md index eff8e34adc..9c5570a994 100644 --- a/docs/reference/sensors/column/patterns-column-sensors.md +++ b/docs/reference/sensors/column/patterns-column-sensors.md @@ -367,6 +367,26 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -819,6 +839,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- 
lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1215,6 +1259,26 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1602,6 +1666,26 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2012,6 +2096,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- 
lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2472,6 +2578,31 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2879,6 +3010,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3332,6 +3485,31 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '^[0-9]{5}(?:-[0-9]{4})?$' + ) IS NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3725,6 +3903,26 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, 
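+                    {# The pattern below accepts 32 hexadecimal digits in the canonical
+                       8-4-4-4-12 UUID grouping, allowing each separator to be a hyphen, a
+                       whitespace character, or absent; being a Jinja comment, it adds nothing
+                       to the rendered SQL. #}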
'^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -4158,6 +4356,29 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^[0-9a-fA-F]{8}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{4}[\s-]?[0-9a-fA-F]{12}$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -4623,6 +4844,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -5090,6 +5335,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL + AND REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{lib.render_date_format_regex(parameters.date_format)}}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -5526,6 +5795,29 @@ The templates used to 
generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, '^(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''])|([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{1})([.])(\s?))([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})([\s-''.]?([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([\s-''.]?)(([a-zA-ZżźćńółęąśäöüåáéěíôúůýčďťĺňŕřšžçâêîôûàèìòùëïãõŻŹĆŃÓŁĘĄŚÄÖÜÅÁÉĚÍÔÚŮÝČĎŤĹŇŔŘŠŽÇÂÊÎÔÛÀÈÌÒÙËÏÃÕ]{2,})?([.])?))?$') IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -5982,6 +6274,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -6439,6 +6755,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table') }} IS NOT NULL AND + REGEXP_SUBSTR({{ lib.render_target_column('analyzed_table') }}, {{ lib.render_regex(parameters.regex) }}) IS NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/pii-column-sensors.md b/docs/reference/sensors/column/pii-column-sensors.md 
index ef2a7014ab..5f4020d737 100644 --- a/docs/reference/sensors/column/pii-column-sensors.md +++ b/docs/reference/sensors/column/pii-column-sensors.md @@ -430,6 +430,29 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(?:^|[ \t.,:;\"''`|\n\r])[a-zA-Z0-9.!#$%&''*+\/=?^_`{|}~-]{0,63}[a-zA-Z0-9!#$%&''*+\/=?^_`{|}~-]@[a-zA-Z0-9-.]+[.][a-zA-Z]{2,4}(?:[ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -883,6 +906,29 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])[.]){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9][0-9]|[0-9])([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1400,6 +1446,32 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL OR + REGEXP_SUBSTR(CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[a-f0-9A-F]{1,4}:([a-f0-9A-F]{1,4}:|:[a-f0-9A-F]{1,4}):([a-f0-9A-F]{1,4}:){0,5}([a-f0-9A-F]{1,4}){0,1}([ \t.,:;\"''`|\n\r]|$)') IS NOT NULL + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === 
"Trino" ```sql+jinja @@ -1875,6 +1947,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])((((\(\+1\)|(\+1)|(\([0][0][1]\)|([0][0][1]))|\(1/)|(1))[\s.-]?)?(\(?\d{3}\)?[\s.-]?)(\d{3}[\s.-]?)(\d{4})))([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2339,6 +2435,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN REGEXP_SUBSTR( + CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096)), + '(^|[ \t.,:;\"''`|\n\r])[0-9]{5}(?:-[0-9]{4})?([ \t.,:;\"''`|\n\r]|$)' + ) IS NOT NULL THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/range-column-sensors.md b/docs/reference/sensors/column/range-column-sensors.md index ada790bbb4..7f6f659954 100644 --- a/docs/reference/sensors/column/range-column-sensors.md +++ b/docs/reference/sensors/column/range-column-sensors.md @@ -266,6 +266,19 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -548,6 +561,19 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN({{ lib.render_target_column('analyzed_table')}}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} 
AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/sampling-column-sensors.md b/docs/reference/sensors/column/sampling-column-sensors.md index 6fdc9a3199..2695c43051 100644 --- a/docs/reference/sensors/column/sampling-column-sensors.md +++ b/docs/reference/sensors/column/sampling-column-sensors.md @@ -482,6 +482,34 @@ The templates used to generate the SQL query for each data source supported by D WHERE sample_table.sample_index <= {{ parameters.limit }} ORDER BY sample_index DESC ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + WITH column_samples AS ( + SELECT + unlimited_samples.sample_value AS sample_value, + unlimited_samples.sample_count AS sample_count, + ROW_NUMBER() OVER (ORDER BY unlimited_samples.sample_count DESC) AS sample_index + FROM + ( + SELECT + {{ lib.render_target_column('analyzed_table') }} AS sample_value, + COUNT(*) AS sample_count + FROM + {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(table_alias_prefix = 'analyzed_table', indentation = ' ') }} + GROUP BY sample_value + ) AS unlimited_samples + ) + SELECT + sample_table.sample_value AS actual_value, + sample_table.sample_count AS sample_count, + sample_table.sample_index AS sample_index + FROM column_samples AS sample_table + WHERE sample_table.sample_index <= {{ parameters.limit }} + ORDER BY sample_index DESC + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/text-column-sensors.md b/docs/reference/sensors/column/text-column-sensors.md index d9ae6496da..a36a14a2fc 100644 --- a/docs/reference/sensors/column/text-column-sensors.md +++ b/docs/reference/sensors/column/text-column-sensors.md @@ -301,6 +301,21 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -621,6 +636,21 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) - LENGTH( OREPLACE(TRIM({{lib.render_target_column('analyzed_table')}}), ' ', '') ) + CASE WHEN LENGTH( TRIM({{lib.render_target_column('analyzed_table')}}) ) > 0 THEN 1 ELSE 0 END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === 
"Trino" ```sql+jinja @@ -1017,6 +1047,25 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1465,6 +1514,29 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) > {{(parameters.max_length)}} + THEN 1 + ELSE 0 + END + )/ COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1869,6 +1941,25 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2316,6 +2407,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LENGTH(CAST({{ lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) < {{(parameters.min_length)}} + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2764,6 +2877,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- 
lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 100.0 + ELSE + 100.0 * SUM( + CASE + WHEN LENGTH( CAST({{ lib.render_target_column('analyzed_table') }} AS VARCHAR(4096))) BETWEEN {{parameters.min_length}} AND {{parameters.max_length}} THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3096,6 +3231,21 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MAX( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3422,6 +3572,21 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + AVG( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3748,6 +3913,21 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + MIN( + LENGTH(CAST({{lib.render_target_column('analyzed_table')}} AS VARCHAR(4096))) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/uniqueness-column-sensors.md b/docs/reference/sensors/column/uniqueness-column-sensors.md index c162b5fde3..c497dae30e 100644 --- a/docs/reference/sensors/column/uniqueness-column-sensors.md +++ b/docs/reference/sensors/column/uniqueness-column-sensors.md @@ -298,6 +298,21 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT( + DISTINCT({{ 
lib.render_target_column('analyzed_table') }}) + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -644,6 +659,23 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table')}}) = 0 THEN 100.0 + ELSE 100.0 * COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -946,6 +978,20 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT({{ lib.render_target_column('analyzed_table') }})) + AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1307,6 +1353,24 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * ( + COUNT({{ lib.render_target_column('analyzed_table') }}) - COUNT(DISTINCT {{ lib.render_target_column('analyzed_table') }}) + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/column/whitespace-column-sensors.md b/docs/reference/sensors/column/whitespace-column-sensors.md index 9c327429c9..9cd6844738 100644 --- a/docs/reference/sensors/column/whitespace-column-sensors.md +++ b/docs/reference/sensors/column/whitespace-column-sensors.md @@ -386,6 +386,26 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ 
lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -838,6 +858,29 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) = 0 + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1242,6 +1285,26 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table') }}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1687,6 +1750,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN LOWER({{ lib.render_target_column('analyzed_table')}}) IN ('null', 'undefined', 'missing', 'nan', 'none', 'na', 'n/a', 'empty', '#n/d', 'blank', '""', '''''', '-', '') + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2115,6 +2200,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ 
lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2591,6 +2697,31 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ lib.render_target_column('analyzed_table')}}) IS NOT NULL + AND TRIM({{ lib.render_target_column('analyzed_table')}}) <> '' + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> LENGTH(TRIM({{ lib.render_target_column('analyzed_table')}})) + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3020,6 +3151,27 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -3493,6 +3645,30 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT({{ lib.render_target_column('analyzed_table') }}) = 0 THEN 0.0 + ELSE 100.0 * SUM( + CASE + WHEN {{ lib.render_target_column('analyzed_table')}} IS NOT NULL + AND LENGTH({{ lib.render_target_column('analyzed_table')}}) <> 0 + AND TRIM({{ lib.render_target_column('analyzed_table')}}) = '' + THEN 1 + ELSE 0 + END + ) / COUNT({{ lib.render_target_column('analyzed_table') }}) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/table/accuracy-table-sensors.md 
b/docs/reference/sensors/table/accuracy-table-sensors.md index f8552000d4..2852268715 100644 --- a/docs/reference/sensors/table/accuracy-table-sensors.md +++ b/docs/reference/sensors/table/accuracy-table-sensors.md @@ -318,6 +318,28 @@ The templates used to generate the SQL query for each data source supported by D FROM {{ lib.render_target_table() }} AS analyzed_table {{- lib.render_where_clause() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {%- macro render_referenced_table(referenced_table) -%} + {%- if referenced_table.find(".") < 0 -%} + {{ lib.quote_identifier(lib.macro_database_name) }}.{{ lib.quote_identifier(lib.macro_schema_name) }}.{{- lib.quote_identifier(referenced_table) -}} + {%- else -%} + {{ referenced_table }} + {%- endif -%} + {%- endmacro -%} + + SELECT + (SELECT + COUNT(*) + FROM {{ render_referenced_table(parameters.referenced_table) }} AS referenced_table + ) AS expected_value, + COUNT(*) AS actual_value + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/table/availability-table-sensors.md b/docs/reference/sensors/table/availability-table-sensors.md index daa2a82e77..d381c9e41c 100644 --- a/docs/reference/sensors/table/availability-table-sensors.md +++ b/docs/reference/sensors/table/availability-table-sensors.md @@ -360,6 +360,27 @@ The templates used to generate the SQL query for each data source supported by D {{ lib.render_where_clause() }} ) AS tab_scan ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + 0.0 AS actual_value + {{- lib.render_time_dimension_projection('tab_scan') }} + FROM + ( + SELECT + * + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{ lib.render_where_clause() }} + QUALIFY ROW_NUMBER() OVER (ORDER BY 1) = 1 + ) AS tab_scan + {% if lib.time_series is not none -%} + GROUP BY time_period + ORDER BY time_period + {%- endif -%} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/table/custom_sql-table-sensors.md b/docs/reference/sensors/table/custom_sql-table-sensors.md index 9aa31131e1..f83e4398d0 100644 --- a/docs/reference/sensors/table/custom_sql-table-sensors.md +++ b/docs/reference/sensors/table/custom_sql-table-sensors.md @@ -132,6 +132,12 @@ The templates used to generate the SQL query for each data source supported by D {% import '/dialects/sqlserver.sql.jinja2' as lib with context -%} {{ parameters.sql_query | replace('{table_name}', target_table.table_name) | replace('{schema_name}', target_table.schema_name) }} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + {{ parameters.sql_query | replace('{table_name}', target_table.table_name) | replace('{schema_name}', target_table.schema_name) }} + ``` === "Trino" ```sql+jinja @@ -426,6 +432,20 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + ({{ parameters.sql_expression | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + 
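{# Illustration only, added as a Jinja comment that is stripped at render time; the expression and table names are hypothetical. With sql_expression set to "MAX({alias}.price)", the two replace filters above substitute the rendered target table for {table} and the 'analyzed_table' alias for {alias}, so the template renders to roughly: SELECT (MAX(analyzed_table.price)) AS actual_value FROM "your_database"."your_schema"."your_table" AS analyzed_table. #}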
FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -831,6 +851,26 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1272,6 +1312,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT (*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN NOT ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1685,6 +1747,26 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END + ) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -2126,6 +2208,28 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + CASE + WHEN COUNT(*) = 0 THEN 100.0 + ELSE 100.0 * SUM( + CASE + WHEN ({{ parameters.sql_condition | + replace('{table}', lib.render_target_table()) | replace('{alias}', 'analyzed_table') }}) + THEN 1 + ELSE 0 + END) / COUNT(*) + END AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/table/timeliness-table-sensors.md 
b/docs/reference/sensors/table/timeliness-table-sensors.md index 1ec6dd83cd..1c1a10bad1 100644 --- a/docs/reference/sensors/table/timeliness-table-sensors.md +++ b/docs/reference/sensors/table/timeliness-table-sensors.md @@ -652,6 +652,50 @@ The templates used to generate the SQL query for each data source supported by D {%- endif -%} {%- endmacro -%} + SELECT + {{ render_current_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ 
lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + SELECT {{ render_current_event_diff() }} AS actual_value {{- lib.render_data_grouping_projections('analyzed_table') }} @@ -1431,6 +1475,65 @@ The templates used to generate the SQL query for each data source supported by D {%- endif -%} {%- endmacro -%} + SELECT + {{ render_ingestion_event_max_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_max_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}), + MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ 
lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP) + - CAST(MAX({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + SELECT {{ render_ingestion_event_max_diff() }} AS actual_value {{- lib.render_data_grouping_projections('analyzed_table') }} @@ -2145,6 +2248,50 @@ The templates used to generate the SQL query for each data source supported by D {%- endif -%} {%- endmacro -%} + SELECT + {{ render_current_ingestion_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_current_ingestion_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS 
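{# Editorial note, added as a Jinja comment with no effect on the generated SQL: Teradata exposes the timestamp difference as an INTERVAL DAY(4) TO SECOND value, so the delay is rebuilt as whole seconds (days * 86400 + hours * 3600 + minutes * 60 + seconds) and then divided by 24.0 and by 3600.0, i.e. by 86400 seconds per day, to express the result in fractional days. For example, an interval of 1 day 12 hours gives 129600 seconds, which is 1.5 days. #}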
TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + DATEDIFF( + CURRENT_DATE, + MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + ( + EXTRACT(DAY FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CURRENT_TIMESTAMP - CAST(MAX({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}) AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + SELECT + {{ render_current_ingestion_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} @@ -3003,6 +3150,55 @@ The templates used to generate the SQL query for each data source supported by D {%- endif -%} {%- endmacro -%} + SELECT + {{ render_ingestion_event_diff() }} AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro render_ingestion_event_diff() -%} + {%- if lib.is_instant(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_instant(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ 
lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- elif lib.is_local_date(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + DATEDIFF( + {{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }}, + {{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} + ) + ) + {%- elif lib.is_local_date_time(table.columns[table.timestamp_columns.ingestion_timestamp_column].type_snapshot.column_type) == 'true' + and lib.is_local_date_time(table.columns[table.timestamp_columns.event_timestamp_column].type_snapshot.column_type) == 'true' -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- else -%} + MAX( + EXTRACT(DAY FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 86400 + + EXTRACT(HOUR FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 3600 + + EXTRACT(MINUTE FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) * 60 + + EXTRACT(SECOND FROM ((CAST({{ lib.render_column(table.timestamp_columns.ingestion_timestamp_column, 'analyzed_table') }} AS TIMESTAMP) - CAST({{ 
lib.render_column(table.timestamp_columns.event_timestamp_column, 'analyzed_table') }} AS TIMESTAMP)) DAY(4) TO SECOND)) + ) / 24.0 / 3600.0 + {%- endif -%} + {%- endmacro -%} + SELECT {{ render_ingestion_event_diff() }} AS actual_value {{- lib.render_data_grouping_projections('analyzed_table') }} diff --git a/docs/reference/sensors/table/uniqueness-table-sensors.md b/docs/reference/sensors/table/uniqueness-table-sensors.md index bf63139d9a..d50744da5c 100644 --- a/docs/reference/sensors/table/uniqueness-table-sensors.md +++ b/docs/reference/sensors/table/uniqueness-table-sensors.md @@ -559,6 +559,36 @@ The templates used to generate the SQL query for each data source supported by D ) grouping_table {{ render_group_by('grouping_table') }} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE + WHEN SUM(duplicated_count) IS NULL THEN 0 + ELSE SUM(CASE WHEN duplicated_count > 1 THEN 1 ELSE 0 END) + END AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + FROM ( + SELECT COUNT(*) AS duplicated_count + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja @@ -1149,6 +1179,36 @@ The templates used to generate the SQL query for each data source supported by D ) grouping_table {{ render_group_by('grouping_table') }} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + + {% macro extract_in_list(values_list, column_prefix = none, column_suffix = none, separate_by_comma = false) %} + {%- set column_names = table.columns if values_list is none or (values_list | length()) == 0 else values_list -%} + {%- for item in column_names -%} + {{ (column_prefix) if column_prefix is not none -}} {{- lib.quote_identifier(item) -}} {{- (column_suffix) if column_suffix is not none -}} {{- ", " if not loop.last }} {{- "', ', " if separate_by_comma and not loop.last }} + {%- endfor -%} + {% endmacro %} + + SELECT + CASE WHEN SUM(distinct_records) IS NULL THEN 0 + ELSE (1 - SUM(distinct_records) * 1.0 / SUM(records_number)) * 100.0 END + AS actual_value + {{- lib.render_data_grouping_projections_reference('grouping_table') }} + {{- lib.render_time_dimension_projection_reference('grouping_table') }} + 
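        {#- SUM(records_number) adds up all analyzed rows, while SUM(distinct_records) effectively counts the distinct combinations of the tested columns returned by the inner GROUP BY, so the CASE expression above evaluates to the duplicate percentage. -#} +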
FROM ( + SELECT COUNT(*) AS records_number, + COUNT(*) OVER (PARTITION BY {{ extract_in_list(parameters.columns) -}} ) AS distinct_records + {{- lib.render_data_grouping_projections('analyzed_table', indentation=' ') }} + {{- lib.render_time_dimension_projection('analyzed_table', indentation=' ') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause(indentation=' ', extra_filter = 'COALESCE(' ~ extract_in_list(parameters.columns, column_prefix='CAST(', column_suffix=' AS VARCHAR(4096))') ~ ') IS NOT NULL') }} + GROUP BY {{ extract_in_list(parameters.columns) -}} {{- (", " ~ lib.render_grouping_column_names()) if (lib.data_groupings is not none and (lib.data_groupings | length()) > 0) or lib.time_series is not none }} + ) grouping_table + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/sensors/table/volume-table-sensors.md b/docs/reference/sensors/table/volume-table-sensors.md index cd4c45a891..2877066c33 100644 --- a/docs/reference/sensors/table/volume-table-sensors.md +++ b/docs/reference/sensors/table/volume-table-sensors.md @@ -262,6 +262,19 @@ The templates used to generate the SQL query for each data source supported by D {{- lib.render_group_by() -}} {{- lib.render_order_by() -}} ``` +=== "Teradata" + + ```sql+jinja + {% import '/dialects/teradata.sql.jinja2' as lib with context -%} + SELECT + COUNT(*) AS actual_value + {{- lib.render_data_grouping_projections('analyzed_table') }} + {{- lib.render_time_dimension_projection('analyzed_table') }} + FROM {{ lib.render_target_table() }} AS analyzed_table + {{- lib.render_where_clause() -}} + {{- lib.render_group_by() -}} + {{- lib.render_order_by() -}} + ``` === "Trino" ```sql+jinja diff --git a/docs/reference/yaml/ConnectionYaml.md b/docs/reference/yaml/ConnectionYaml.md index e24f914308..b556142367 100644 --- a/docs/reference/yaml/ConnectionYaml.md +++ b/docs/reference/yaml/ConnectionYaml.md @@ -29,7 +29,7 @@ The structure of this object is described below | Property name | Description                     | Data type | Enum values | Default value | Sample values | |---------------|---------------------------------|-----------|-------------|---------------|---------------| -|`provider_type`|Database provider type (required).|*enum*|*bigquery*
*databricks*<br/>*mysql*<br/>*oracle*<br/>*postgresql*<br/>*duckdb*<br/>*presto*<br/>*redshift*<br/>*snowflake*<br/>*spark*<br/>*sqlserver*<br/>*trino*<br/>*hana*<br/>*db2*<br/>*mariadb*<br/>*clickhouse*<br/>*questdb*| | |
+|`provider_type`|Database provider type (required).|*enum*|*bigquery*<br/>*clickhouse*<br/>*databricks*<br/>*db2*<br/>*duckdb*<br/>*hana*<br/>*mariadb*<br/>*mysql*<br/>*oracle*<br/>*postgresql*<br/>*presto*<br/>*questdb*<br/>*redshift*<br/>*snowflake*<br/>*spark*<br/>*sqlserver*<br/>*teradata*<br/>*trino*
| | | |[`bigquery`](./ConnectionYaml.md#bigqueryparametersspec)|BigQuery connection parameters. Specify parameters in the bigquery section.|*[BigQueryParametersSpec](./ConnectionYaml.md#bigqueryparametersspec)*| | | | |[`snowflake`](./ConnectionYaml.md#snowflakeparametersspec)|Snowflake connection parameters. Specify parameters in the snowflake section or set the url (which is the Snowflake JDBC url).|*[SnowflakeParametersSpec](./ConnectionYaml.md#snowflakeparametersspec)*| | | | |[`postgresql`](./ConnectionYaml.md#postgresqlparametersspec)|PostgreSQL connection parameters. Specify parameters in the postgresql section or set the url (which is the PostgreSQL JDBC url).|*[PostgresqlParametersSpec](./ConnectionYaml.md#postgresqlparametersspec)*| | | | @@ -47,6 +47,7 @@ The structure of this object is described below |[`mariadb`](./ConnectionYaml.md#mariadbparametersspec)|MariaDB connection parameters. Specify parameters in the mariadb section or set the url (which is the MariaDB JDBC url).|*[MariaDbParametersSpec](./ConnectionYaml.md#mariadbparametersspec)*| | | | |[`clickhouse`](./ConnectionYaml.md#clickhouseparametersspec)|ClickHouse connection parameters. Specify parameters in the clickhouse section or set the url (which is the ClickHouse JDBC url).|*[ClickHouseParametersSpec](./ConnectionYaml.md#clickhouseparametersspec)*| | | | |[`questdb`](./ConnectionYaml.md#questdbparametersspec)|QuestDB connection parameters. Specify parameters in the questdb section or set the url (which is the QuestDB JDBC url).|*[QuestDbParametersSpec](./ConnectionYaml.md#questdbparametersspec)*| | | | +|[`teradata`](./ConnectionYaml.md#teradataparametersspec)|Teradata connection parameters. Specify parameters in the teradata section or set the url (which is the Teradata JDBC url).|*[TeradataParametersSpec](./ConnectionYaml.md#teradataparametersspec)*| | | | |`parallel_jobs_limit`|The concurrency limit for the maximum number of parallel SQL queries executed on this connection.|*integer*| | | | |[`default_grouping_configuration`](./ConnectionYaml.md#datagroupingconfigurationspec)|Default data grouping configuration for all tables. The configuration may be overridden on table, column and check level. Data groupings are configured in two cases: (1) the data in the table should be analyzed with a GROUP BY condition, to analyze different datasets using separate time series, for example a table contains data from multiple countries and there is a 'country' column used for partitioning. a static dimension is assigned to a table, when the data is partitioned at a table level (similar tables store the same information, but for different countries, etc.). (2) a static dimension is assigned to a table, when the data is partitioned at a table level (similar tables store the same information, but for different countries, etc.). |*[DataGroupingConfigurationSpec](./ConnectionYaml.md#datagroupingconfigurationspec)*| | | | |[`schedules`](./ConnectionYaml.md#cronschedulesspec)|Configuration of the job scheduler that runs data quality checks. The scheduler configuration is divided into types of checks that have different schedules.|*[CronSchedulesSpec](./ConnectionYaml.md#cronschedulesspec)*| | | | @@ -532,6 +533,24 @@ The structure of this object is described below +___ + +## TeradataParametersSpec +Teradata connection parameters. 
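+For example, a minimal connection definition using this section could look like the sketch below (the host, the credentials, and the TMODE JDBC property are illustrative placeholders, not values defined by this specification):
+
+```yaml
+# a hypothetical connection.dqoconnection.yaml for a Teradata data source
+apiVersion: dqo/v1
+kind: source
+spec:
+  provider_type: teradata
+  teradata:
+    host: td.example.com            # or ${TERADATA_HOST}
+    port: "1025"                    # the default Teradata port
+    user: ${TERADATA_USER}          # resolved by environment variable substitution
+    password: ${TERADATA_PASSWORD}
+    properties:
+      TMODE: ANSI                   # example custom JDBC parameter
+```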
+ + +The structure of this object is described below + +| Property name | Description                     | Data type | Enum values | Default value | Sample values | +|---------------|---------------------------------|-----------|-------------|---------------|---------------| +|`host`|Teradata host name. Supports also a ${TERADATA_HOST} configuration with a custom environment variable.|*string*| | | | +|`port`|Teradata port number. The default port is 1025. Supports also a ${TERADATA_PORT} configuration with a custom environment variable.|*string*| | | | +|`user`|Teradata user name. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.|*string*| | | | +|`password`|Teradata database password. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.|*string*| | | | +|`properties`|A dictionary of custom JDBC parameters that are added to the JDBC connection string, a key/value dictionary.|*Dict[string, string]*| | | | + + + ___ ## DataGroupingConfigurationSpec diff --git a/docs/reference/yaml/SensorDefinitionYaml.md b/docs/reference/yaml/SensorDefinitionYaml.md index f18ffbfbb0..6516076c53 100644 --- a/docs/reference/yaml/SensorDefinitionYaml.md +++ b/docs/reference/yaml/SensorDefinitionYaml.md @@ -65,7 +65,7 @@ The structure of this object is described below |`display_name`|Field display name that should be shown as a label for the control.|*string*| | | | |`help_text`|Help text (full description) that will be shown to the user as a hint when the cursor is moved over the control.|*string*| | | | |`data_type`|Parameter data type.|*enum*|*string*
*boolean*<br/>*integer*<br/>*long*<br/>*double*<br/>*date*<br/>*datetime*<br/>*column_name*<br/>*enum*<br/>*string_list*<br/>*integer_list*<br/>*object*| | |
-|`display_hint`|UI control display hint.|*enum*|*textarea*<br/>*column_names*| | |
+|`display_hint`|UI control display hint.|*enum*|*textarea*<br/>*column_names*<br/>*requires_paid_version*
| | | |`required`|True when the value for the parameter must be provided.|*boolean*| | | | |`allowed_values`|List of allowed values for a field that is of an enum type.|*List[string]*| | | | |`default_value`|The default value for a parameter in a custom check or a custom rule.|*string*| | | | diff --git a/dqops/src/main/resources/static/swagger-api/dqops-api-openapi-3.json b/dqops/src/main/resources/static/swagger-api/dqops-api-openapi-3.json index 4100c54a60..c80ef78bb6 100644 --- a/dqops/src/main/resources/static/swagger-api/dqops-api-openapi-3.json +++ b/dqops/src/main/resources/static/swagger-api/dqops-api-openapi-3.json @@ -25707,13 +25707,13 @@ "properties" : { "check_search_filters" : { "description" : "Filters addressing basic tree search parameters. These filters takes precedence over other selectors.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "check_model_patch" : { "description" : "Sample configured check model which will pasted onto selected checks.", - "$ref" : "#/components/schemas/CheckModel", - "originalRef" : "#/components/schemas/CheckModel" + "originalRef" : "#/components/schemas/CheckModel", + "$ref" : "#/components/schemas/CheckModel" }, "selected_tables_to_columns" : { "type" : "object", @@ -26416,8 +26416,8 @@ }, "dashboard" : { "description" : "Dashboard model with an unauthenticated url", - "$ref" : "#/components/schemas/DashboardSpec", - "originalRef" : "#/components/schemas/DashboardSpec" + "originalRef" : "#/components/schemas/DashboardSpec", + "$ref" : "#/components/schemas/DashboardSpec" }, "authenticated_dashboard_url" : { "type" : "string", @@ -26465,8 +26465,8 @@ }, "schedule" : { "description" : "Schedule for importing source tables using a CRON scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" } } } @@ -26710,8 +26710,8 @@ "properties" : { "check_search_filters" : { "description" : "Filters addressing basic tree search parameters. 
These filters takes precedence over other selectors.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "selected_tables_to_columns" : { "type" : "object", @@ -27288,8 +27288,8 @@ "type" : "array", "description" : "List of fields for editing the sensor parameters.", "items" : { - "$ref" : "#/components/schemas/FieldModel", - "originalRef" : "#/components/schemas/FieldModel" + "originalRef" : "#/components/schemas/FieldModel", + "$ref" : "#/components/schemas/FieldModel" } }, "table_level_filter" : { @@ -27302,18 +27302,18 @@ }, "warning" : { "description" : "Rule parameters for the warning severity rule.", - "$ref" : "#/components/schemas/RuleParametersModel", - "originalRef" : "#/components/schemas/RuleParametersModel" + "originalRef" : "#/components/schemas/RuleParametersModel", + "$ref" : "#/components/schemas/RuleParametersModel" }, "error" : { "description" : "Rule parameters for the error severity rule.", - "$ref" : "#/components/schemas/RuleParametersModel", - "originalRef" : "#/components/schemas/RuleParametersModel" + "originalRef" : "#/components/schemas/RuleParametersModel", + "$ref" : "#/components/schemas/RuleParametersModel" }, "fatal" : { "description" : "Rule parameters for the fatal severity rule.", - "$ref" : "#/components/schemas/RuleParametersModel", - "originalRef" : "#/components/schemas/RuleParametersModel" + "originalRef" : "#/components/schemas/RuleParametersModel", + "$ref" : "#/components/schemas/RuleParametersModel" }, "disabled" : { "type" : "boolean", @@ -27374,8 +27374,8 @@ "type" : "array", "description" : "Simplistic list of all data quality checks.", "items" : { - "$ref" : "#/components/schemas/CheckListModel", - "originalRef" : "#/components/schemas/CheckListModel" + "originalRef" : "#/components/schemas/CheckListModel", + "$ref" : "#/components/schemas/CheckListModel" } }, "can_edit" : { @@ -27467,14 +27467,14 @@ "type" : "array", "description" : "List of all data quality categories that contain data quality checks inside.", "items" : { - "$ref" : "#/components/schemas/QualityCategoryModel", - "originalRef" : "#/components/schemas/QualityCategoryModel" + "originalRef" : "#/components/schemas/QualityCategoryModel", + "$ref" : "#/components/schemas/QualityCategoryModel" } }, "effective_schedule" : { "description" : "Model of configured schedule enabled on the check container.", - "$ref" : "#/components/schemas/EffectiveScheduleModel", - "originalRef" : "#/components/schemas/EffectiveScheduleModel" + "originalRef" : "#/components/schemas/EffectiveScheduleModel", + "$ref" : "#/components/schemas/EffectiveScheduleModel" }, "effective_schedule_enabled_status" : { "type" : "string", @@ -27487,13 +27487,13 @@ }, "run_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to start the job.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "data_clean_job_template" : { "description" : "Configured parameters for the \"data clean\" job that after being supplied with a time range should be pushed to the job queue in order to remove stored results connected with this check container", - "$ref" : 
"#/components/schemas/DeleteStoredDataQueueJobParameters", - "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters" + "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters", + "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters" }, "can_edit" : { "type" : "boolean", @@ -27759,16 +27759,16 @@ "type" : "object", "description" : "A dictionary of nested folders with data quality checks. The keys are the folder names.", "additionalProperties" : { - "$ref" : "#/definitions/CheckDefinitionFolderModel", - "originalRef" : "#/definitions/CheckDefinitionFolderModel" + "originalRef" : "#/definitions/CheckDefinitionFolderModel", + "$ref" : "#/definitions/CheckDefinitionFolderModel" } }, "checks" : { "type" : "array", "description" : "List of data quality checks defined in this folder.", "items" : { - "$ref" : "#/components/schemas/CheckDefinitionListModel", - "originalRef" : "#/components/schemas/CheckDefinitionListModel" + "originalRef" : "#/components/schemas/CheckDefinitionListModel", + "$ref" : "#/components/schemas/CheckDefinitionListModel" } } }, @@ -28496,21 +28496,21 @@ }, "table_checks" : { "description" : "Proposed configuration of table-level data quality checks, such as volume, timeliness or schema.", - "$ref" : "#/components/schemas/CheckContainerModel", - "originalRef" : "#/components/schemas/CheckContainerModel" + "originalRef" : "#/components/schemas/CheckContainerModel", + "$ref" : "#/components/schemas/CheckContainerModel" }, "column_checks" : { "type" : "object", "description" : "Dictionary of proposed data quality checks for each column.", "additionalProperties" : { - "$ref" : "#/definitions/CheckContainerModel", - "originalRef" : "#/definitions/CheckContainerModel" + "originalRef" : "#/definitions/CheckContainerModel", + "$ref" : "#/definitions/CheckContainerModel" } }, "run_checks_job" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order run checks for the table.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" } }, "description" : "Model that has a proposed configuration of checks on a table and its columns generated by a data quality check mining service." @@ -28797,8 +28797,8 @@ "type" : "array", "description" : "List of fields for editing the sensor parameters.", "items" : { - "$ref" : "#/components/schemas/FieldModel", - "originalRef" : "#/components/schemas/FieldModel" + "originalRef" : "#/components/schemas/FieldModel", + "$ref" : "#/components/schemas/FieldModel" } }, "sensor_name" : { @@ -28811,8 +28811,8 @@ }, "rule" : { "description" : "Threshold (alerting) rules defined for a check.", - "$ref" : "#/components/schemas/RuleThresholdsModel", - "originalRef" : "#/components/schemas/RuleThresholdsModel" + "originalRef" : "#/components/schemas/RuleThresholdsModel", + "$ref" : "#/components/schemas/RuleThresholdsModel" }, "supports_error_sampling" : { "type" : "boolean", @@ -28837,18 +28837,18 @@ }, "data_grouping_override" : { "description" : "Data grouping configuration for this check. When a data grouping configuration is assigned at a check level, it overrides the data grouping configuration from the table level. 
Data grouping is configured in two cases: (1) the data in the table should be analyzed with a GROUP BY condition, to analyze different groups of rows using separate time series, for example a table contains data from multiple countries and there is a 'country' column used for partitioning. (2) a static data grouping configuration is assigned to a table, when the data is partitioned at a table level (similar tables store the same information, but for different countries, etc.). ", - "$ref" : "#/components/schemas/DataGroupingConfigurationSpec", - "originalRef" : "#/components/schemas/DataGroupingConfigurationSpec" + "originalRef" : "#/components/schemas/DataGroupingConfigurationSpec", + "$ref" : "#/components/schemas/DataGroupingConfigurationSpec" }, "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "effective_schedule" : { "description" : "Model of configured schedule enabled on the check level.", - "$ref" : "#/components/schemas/EffectiveScheduleModel", - "originalRef" : "#/components/schemas/EffectiveScheduleModel" + "originalRef" : "#/components/schemas/EffectiveScheduleModel", + "$ref" : "#/components/schemas/EffectiveScheduleModel" }, "schedule_enabled_status" : { "type" : "string", @@ -28859,8 +28859,8 @@ "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -28885,13 +28885,13 @@ }, "run_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to start the job.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "data_clean_job_template" : { "description" : "Configured parameters for the \"data clean\" job that after being supplied with a time range should be pushed to the job queue in order to remove stored results connected with this check.", - "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters", - "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters" + "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters", + "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters" }, "data_grouping_configuration" : { "type" : "string", @@ -28921,8 +28921,8 @@ "type" : "array", "description" : "List of similar checks in other check types or in other time scales.", "items" : { - "$ref" : "#/components/schemas/SimilarCheckModel", - "originalRef" : "#/components/schemas/SimilarCheckModel" + "originalRef" : "#/components/schemas/SimilarCheckModel", + "$ref" : "#/components/schemas/SimilarCheckModel" } }, "check_hash" : { @@ -28943,8 +28943,8 @@ "description" : "Boolean flag that decides if the current 
user can delete data (results)." }, "similar_profiling_check" : { - "$ref" : "#/components/schemas/SimilarCheckModel", - "originalRef" : "#/components/schemas/SimilarCheckModel" + "originalRef" : "#/components/schemas/SimilarCheckModel", + "$ref" : "#/components/schemas/SimilarCheckModel" } }, "description" : "Model that returns the form definition and the form data to edit a single data quality check." @@ -29405,8 +29405,8 @@ "type" : "array", "description" : "Single check results", "items" : { - "$ref" : "#/components/schemas/CheckResultEntryModel", - "originalRef" : "#/components/schemas/CheckResultEntryModel" + "originalRef" : "#/components/schemas/CheckResultEntryModel", + "$ref" : "#/components/schemas/CheckResultEntryModel" } } } @@ -29842,8 +29842,8 @@ "checkHierarchyIdsModels" : { "type" : "array", "items" : { - "$ref" : "#/components/schemas/HierarchyIdModel", - "originalRef" : "#/components/schemas/HierarchyIdModel" + "originalRef" : "#/components/schemas/HierarchyIdModel", + "$ref" : "#/components/schemas/HierarchyIdModel" } } }, @@ -29945,8 +29945,8 @@ }, "check_container_type" : { "description" : "Check type with time-scale.", - "$ref" : "#/components/schemas/CheckContainerTypeModel", - "originalRef" : "#/components/schemas/CheckContainerTypeModel" + "originalRef" : "#/components/schemas/CheckContainerTypeModel", + "$ref" : "#/components/schemas/CheckContainerTypeModel" }, "sensor_name" : { "type" : "string", @@ -29954,23 +29954,23 @@ }, "check_model" : { "description" : "Template of the check model with the sensor parameters and rule parameters", - "$ref" : "#/components/schemas/CheckModel", - "originalRef" : "#/components/schemas/CheckModel" + "originalRef" : "#/components/schemas/CheckModel", + "$ref" : "#/components/schemas/CheckModel" }, "sensor_parameters_definitions" : { "type" : "array", "description" : "List of sensor parameter fields definitions.", "items" : { - "$ref" : "#/components/schemas/ParameterDefinitionSpec", - "originalRef" : "#/components/schemas/ParameterDefinitionSpec" + "originalRef" : "#/components/schemas/ParameterDefinitionSpec", + "$ref" : "#/components/schemas/ParameterDefinitionSpec" } }, "rule_parameters_definitions" : { "type" : "array", "description" : "List of threshold (alerting) rule's parameters definitions (for a single rule, regardless of severity).", "items" : { - "$ref" : "#/components/schemas/ParameterDefinitionSpec", - "originalRef" : "#/components/schemas/ParameterDefinitionSpec" + "originalRef" : "#/components/schemas/ParameterDefinitionSpec", + "$ref" : "#/components/schemas/ParameterDefinitionSpec" } } }, @@ -30263,18 +30263,18 @@ }, "table" : { "description" : "The full physical name (schema.table) of the target table.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "check_search_filters" : { "description" : "Check search filters that identify data quality checks for which the error samples are collected.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "time_window_filter" : { "description" : "Optional time window filter, configures the time range for partitioned tables that is analyzed to find error samples.", - "$ref" : 
"#/components/schemas/TimeWindowFilterParameters", - "originalRef" : "#/components/schemas/TimeWindowFilterParameters" + "originalRef" : "#/components/schemas/TimeWindowFilterParameters", + "$ref" : "#/components/schemas/TimeWindowFilterParameters" }, "data_scope" : { "type" : "string", @@ -30287,8 +30287,8 @@ }, "error_sampler_result" : { "description" : "The summary of the error sampling collection job after if finished. Returns the number of error samplers that collected samples, columns analyzed, error samples (values) captured.", - "$ref" : "#/components/schemas/ErrorSamplerResult", - "originalRef" : "#/components/schemas/ErrorSamplerResult" + "originalRef" : "#/components/schemas/ErrorSamplerResult", + "$ref" : "#/components/schemas/ErrorSamplerResult" } } } @@ -30331,13 +30331,13 @@ "properties" : { "check_search_filters" : { "description" : "Check search filters that identify the checks for which the error samples should be collected.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "time_window_filter" : { "description" : "Optional time window filter, configures the time range for partitioned tables that is analyzed to find error samples.", - "$ref" : "#/components/schemas/TimeWindowFilterParameters", - "originalRef" : "#/components/schemas/TimeWindowFilterParameters" + "originalRef" : "#/components/schemas/TimeWindowFilterParameters", + "$ref" : "#/components/schemas/TimeWindowFilterParameters" }, "data_scope" : { "type" : "string", @@ -30350,8 +30350,8 @@ }, "error_sampler_result" : { "description" : "The summary of the error sampling collection job after if finished. Returns the number of error samplers executed, columns analyzed, error samples (values) captured.", - "$ref" : "#/components/schemas/ErrorSamplerResult", - "originalRef" : "#/components/schemas/ErrorSamplerResult" + "originalRef" : "#/components/schemas/ErrorSamplerResult", + "$ref" : "#/components/schemas/ErrorSamplerResult" } }, "description" : "Collect error samples job parameters, specifies the target checks that should be executed to collect error samples and an optional time window." @@ -30383,13 +30383,13 @@ "properties" : { "jobId" : { "description" : "Job id that identifies a job that was started on the DQOps job queue.", - "$ref" : "#/components/schemas/DqoQueueJobId", - "originalRef" : "#/components/schemas/DqoQueueJobId" + "originalRef" : "#/components/schemas/DqoQueueJobId", + "$ref" : "#/components/schemas/DqoQueueJobId" }, "result" : { "description" : "Optional result object that is returned only when the wait parameter was true and the \"collect error samples\" job has finished. Contains the summary result of collecting error samples, including the number of error samplers that were executed, and the number of error samples collected. 
", - "$ref" : "#/components/schemas/ErrorSamplerResult", - "originalRef" : "#/components/schemas/ErrorSamplerResult" + "originalRef" : "#/components/schemas/ErrorSamplerResult", + "$ref" : "#/components/schemas/ErrorSamplerResult" }, "status" : { "type" : "string", @@ -30476,13 +30476,13 @@ }, "table" : { "description" : "The full physical name (schema.table) of the target table.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "statistics_collector_search_filters" : { "description" : "Statistics collectors search filters that identify the type of statistics collector to run.", - "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters", - "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters" + "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters", + "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters" }, "data_scope" : { "type" : "string", @@ -30504,8 +30504,8 @@ }, "collect_statistics_result" : { "description" : "The summary of the statistics collection job after if finished. Returns the number of collectors analyzed, columns analyzed, statistics results captured.", - "$ref" : "#/components/schemas/CollectStatisticsResult", - "originalRef" : "#/components/schemas/CollectStatisticsResult" + "originalRef" : "#/components/schemas/CollectStatisticsResult", + "$ref" : "#/components/schemas/CollectStatisticsResult" } } } @@ -30557,8 +30557,8 @@ "properties" : { "statistics_collector_search_filters" : { "description" : "Statistics collectors search filters that identify the type of statistics collector to run.", - "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters", - "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters" + "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters", + "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters" }, "data_scope" : { "type" : "string", @@ -30580,8 +30580,8 @@ }, "collect_statistics_result" : { "description" : "The summary of the statistics collection job after if finished. Returns the number of collectors analyzed, columns analyzed, statistics results captured.", - "$ref" : "#/components/schemas/CollectStatisticsResult", - "originalRef" : "#/components/schemas/CollectStatisticsResult" + "originalRef" : "#/components/schemas/CollectStatisticsResult", + "$ref" : "#/components/schemas/CollectStatisticsResult" } } } @@ -30612,13 +30612,13 @@ "properties" : { "jobId" : { "description" : "Job id that identifies a job that was started on the DQOps job queue.", - "$ref" : "#/components/schemas/DqoQueueJobId", - "originalRef" : "#/components/schemas/DqoQueueJobId" + "originalRef" : "#/components/schemas/DqoQueueJobId", + "$ref" : "#/components/schemas/DqoQueueJobId" }, "result" : { "description" : "Optional result object that is returned only when the wait parameter was true and the \"collect statistics\" job has finished. Contains the summary result of collecting basic statistics, including the number of statistics collectors (queries) that managed to capture metrics about the table(s). 
", - "$ref" : "#/components/schemas/CollectStatisticsResult", - "originalRef" : "#/components/schemas/CollectStatisticsResult" + "originalRef" : "#/components/schemas/CollectStatisticsResult", + "$ref" : "#/components/schemas/CollectStatisticsResult" }, "status" : { "type" : "string", @@ -30772,44 +30772,44 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_text_found_in_set_percent" : { "description" : "The check measures the percentage of rows whose value in a tested column is one of values from a list of expected values or the column value is null. Verifies that the percentage of rows having a valid column value does not exceed the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec" }, "daily_number_found_in_set_percent" : { "description" : "The check measures the percentage of rows whose value in a tested column is one of values from a list of expected values or the column value is null. Verifies that the percentage of rows having a valid column value does not exceed the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec" }, "daily_expected_text_values_in_use_count" : { "description" : "Verifies that the expected string values were found in the column. Raises a data quality issue when too many expected values were not found (were missing). Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec" }, "daily_expected_texts_in_top_values_count" : { "description" : "Verifies that the top X most popular column values contain all values from a list of expected values. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec" }, "daily_expected_numbers_in_use_count" : { "description" : "Verifies that the expected numeric values were found in the column. Raises a data quality issue when too many expected values were not found (were missing). Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec" }, "daily_text_valid_country_code_percent" : { "description" : "Verifies that the percentage of valid country codes in a text column does not fall below the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec" }, "daily_text_valid_currency_code_percent" : { "description" : "Verifies that the percentage of valid currency codes in a text column does not fall below the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec" } } } @@ -30878,44 +30878,44 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_text_found_in_set_percent" : { "description" : "The check measures the percentage of rows whose value in a tested column is one of values from a list of expected values or the column value is null. Verifies that the percentage of rows having a valid column value does not exceed the minimum accepted percentage. 
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec" }, "daily_partition_number_found_in_set_percent" : { "description" : "The check measures the percentage of rows whose value in a tested column is one of values from a list of expected values or the column value is null. Verifies that the percentage of rows having a valid column value does not exceed the minimum accepted percentage. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec" }, "daily_partition_expected_text_values_in_use_count" : { "description" : "Verifies that the expected string values were found in the column. Raises a data quality issue when too many expected values were not found (were missing). Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec" }, "daily_partition_expected_texts_in_top_values_count" : { "description" : "Verifies that the top X most popular column values contain all values from a list of expected values. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec" }, "daily_partition_expected_numbers_in_use_count" : { "description" : "Verifies that the expected numeric values were found in the column. Raises a data quality issue when too many expected values were not found (were missing). Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec" }, "daily_partition_text_valid_country_code_percent" : { "description" : "Verifies that the percentage of valid country codes in a text column does not fall below the minimum accepted percentage. 
Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec" }, "daily_partition_text_valid_currency_code_percent" : { "description" : "Verifies that the percentage of valid currency codes in a text column does not fall below the minimum accepted percentage. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec" } } } @@ -30984,44 +30984,44 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_text_found_in_set_percent" : { "description" : "The check measures the percentage of rows whose value in a tested column is one of values from a list of expected values or the column value is null. Verifies that the percentage of rows having a valid column value does not exceed the minimum accepted percentage. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec" }, "monthly_number_found_in_set_percent" : { "description" : "The check measures the percentage of rows whose value in a tested column is one of values from a list of expected values or the column value is null. Verifies that the percentage of rows having a valid column value does not exceed the minimum accepted percentage. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec" }, "monthly_expected_text_values_in_use_count" : { "description" : "Verifies that the expected string values were found in the column. Raises a data quality issue when too many expected values were not found (were missing). 
Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec" }, "monthly_expected_texts_in_top_values_count" : { "description" : "Verifies that the top X most popular column values contain all values from a list of expected values. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec" }, "monthly_expected_numbers_in_use_count" : { "description" : "Verifies that the expected numeric values were found in the column. Raises a data quality issue when too many expected values were not found (were missing). Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec" }, "monthly_text_valid_country_code_percent" : { "description" : "Verifies that the percentage of valid country codes in a text column does not fall below the minimum accepted percentage. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec" }, "monthly_text_valid_currency_code_percent" : { "description" : "Verifies that the percentage of valid currency codes in a text column does not fall below the minimum accepted percentage. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec" } } } @@ -31090,44 +31090,44 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_text_found_in_set_percent" : { "description" : "The check measures the percentage of rows whose value in a tested column is one of values from a list of expected values or the column value is null. Verifies that the percentage of rows having a valid column value does not exceed the minimum accepted percentage. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec" }, "monthly_partition_number_found_in_set_percent" : { "description" : "The check measures the percentage of rows whose value in a tested column is one of values from a list of expected values or the column value is null. Verifies that the percentage of rows having a valid column value does not exceed the minimum accepted percentage. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec" }, "monthly_partition_expected_text_values_in_use_count" : { "description" : "Verifies that the expected string values were found in the column. Raises a data quality issue when too many expected values were not found (were missing). Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec" }, "monthly_partition_expected_texts_in_top_values_count" : { "description" : "Verifies that the top X most popular column values contain all values from a list of expected values. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec" }, "monthly_partition_expected_numbers_in_use_count" : { "description" : "Verifies that the expected numeric values were found in the column. Raises a data quality issue when too many expected values were not found (were missing). 
Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec" }, "monthly_partition_text_valid_country_code_percent" : { "description" : "Verifies that the percentage of valid country codes in a text column does not fall below the minimum accepted percentage. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec" }, "monthly_partition_text_valid_currency_code_percent" : { "description" : "Verifies that the percentage of valid currency codes in a text column does not fall below the minimum accepted percentage. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec" } } } @@ -31196,44 +31196,44 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_text_found_in_set_percent" : { "description" : "The check measures the percentage of rows whose value in a tested column is one of values from a list of expected values or the column value is null. Verifies that the percentage of rows having a valid column value does not exceed the minimum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextFoundInSetPercentCheckSpec" }, "profile_number_found_in_set_percent" : { "description" : "The check measures the percentage of rows whose value in a tested column is one of values from a list of expected values or the column value is null. 
Verifies that the percentage of rows having a valid column value does not exceed the minimum accepted percentage.", - "$ref" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberFoundInSetPercentCheckSpec" }, "profile_expected_text_values_in_use_count" : { "description" : "Verifies that the expected string values were found in the column. Raises a data quality issue when too many expected values were not found (were missing).", - "$ref" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedTextValuesInUseCountCheckSpec" }, "profile_expected_texts_in_top_values_count" : { "description" : "Verifies that the top X most popular column values contain all values from a list of expected values.", - "$ref" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedTextsInTopValuesCountCheckSpec" }, "profile_expected_numbers_in_use_count" : { "description" : "Verifies that the expected numeric values were found in the column. Raises a data quality issue when too many expected values were not found (were missing).", - "$ref" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec", + "$ref" : "#/components/schemas/ColumnExpectedNumbersInUseCountCheckSpec" }, "profile_text_valid_country_code_percent" : { "description" : "Verifies that the percentage of valid country codes in a text column does not fall below the minimum accepted percentage", - "$ref" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextValidCountryCodePercentCheckSpec" }, "profile_text_valid_currency_code_percent" : { "description" : "Verifies that the percentage of valid currency codes in a text column does not fall below the minimum accepted percentage", - "$ref" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextValidCurrencyCodePercentCheckSpec" } } } @@ -31335,34 +31335,34 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_total_sum_match_percent" : { "description" : "Verifies that the percentage of difference in total sum of a column in a table and total sum of a column of another table does not exceed the set number. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec" }, "daily_total_min_match_percent" : { "description" : "Verifies that the percentage of difference in total min of a column in a table and total min of a column of another table does not exceed the set number. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec" }, "daily_total_max_match_percent" : { "description" : "Verifies that the percentage of difference in total max of a column in a table and total max of a column of another table does not exceed the set number. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec" }, "daily_total_average_match_percent" : { "description" : "Verifies that the percentage of difference in total average of a column in a table and total average of a column of another table does not exceed the set number. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec" }, "daily_total_not_null_count_match_percent" : { "description" : "Verifies that the percentage of difference in total not null count of a column in a table and total not null count of a column of another table does not exceed the set number. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec" } } } @@ -31421,34 +31421,34 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_total_sum_match_percent" : { "description" : "Verifies that the percentage of difference in total sum of a column in a table and total sum of a column of another table does not exceed the set number. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec" }, "monthly_total_min_match_percent" : { "description" : "Verifies that the percentage of difference in total min of a column in a table and total min of a column of another table does not exceed the set number. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec" }, "monthly_total_max_match_percent" : { "description" : "Verifies that the percentage of difference in total max of a column in a table and total max of a column of another table does not exceed the set number. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec" }, "monthly_total_average_match_percent" : { "description" : "Verifies that the percentage of difference in total average of a column in a table and total average of a column of another table does not exceed the set number. 
Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec" }, "monthly_total_not_null_count_match_percent" : { "description" : "Verifies that the percentage of difference in total not null count of a column in a table and total not null count of a column of another table does not exceed the set number. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec" } } } @@ -31507,34 +31507,34 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_total_sum_match_percent" : { "description" : "Verifies that the percentage of difference in total sum of a column in a table and total sum of a column of another table does not exceed the set number.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentCheckSpec" }, "profile_total_min_match_percent" : { "description" : "Verifies that the percentage of difference in total min of a column in a table and total min of a column of another table does not exceed the set number.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentCheckSpec" }, "profile_total_max_match_percent" : { "description" : "Verifies that the percentage of difference in total max of a column in a table and total max of a column of another table does not exceed the set number.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentCheckSpec" }, "profile_total_average_match_percent" : { "description" : "Verifies that the percentage of difference in total average of a column in a table and total average of a column of another table does not exceed the set number.", - "$ref" :
"#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentCheckSpec" }, "profile_total_not_null_count_match_percent" : { "description" : "Verifies that the percentage of difference in total not null count of a column in a table and total not null count of a column of another table does not exceed the set number. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentCheckSpec" } } } @@ -31643,15 +31643,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -31688,23 +31688,23 @@ }, "parameters" : { "description" : "Data quality check parameters. 
Fill the parameters to provide the name of the referenced table and the referenced column.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalAverageMatchPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of difference of average of a table column and of an average of another table column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -31858,15 +31858,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -31903,23 +31903,23 @@ }, "parameters" : { "description" : "Data quality check parameters.
Fill the parameters to provide the name of the referenced table and the referenced column.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalMaxMatchPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of difference of max of a table column and of a max of another table column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -32073,15 +32073,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -32118,23 +32118,23 @@ }, "parameters" : { "description" : "Data quality check parameters. 
Fill the parameters to provide the name of the referenced table and the referenced column.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalMinMatchPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of difference of min of a table column and of a min of another table column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -32288,15 +32288,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -32333,23 +32333,23 @@ }, "parameters" : { "description" : "Data quality check parameters. 
Fill the parameters to provide the name of the referenced table and the referenced column.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalNotNullCountMatchPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of difference of row count of a table column and of a row count of another table column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -32503,15 +32503,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -32548,23 +32548,23 @@ }, "parameters" : { "description" : "Data quality check parameters. 
Fill the parameters to provide the name of the referenced table and the referenced column.", - "$ref" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnAccuracyTotalSumMatchPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of difference of sum of a table column and of a sum of another table column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -32728,94 +32728,94 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_sum_anomaly" : { "description" : "Verifies that the sum in a column changes in a rate within a percentile boundary during the last 90 days.", - "$ref" : "#/components/schemas/ColumnSumAnomalyDifferencingCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumAnomalyDifferencingCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumAnomalyDifferencingCheckSpec", + "$ref" : "#/components/schemas/ColumnSumAnomalyDifferencingCheckSpec" }, "daily_mean_anomaly" : { "description" : "Verifies that the mean value in a column changes in a rate within a percentile boundary during the last 90 days.", - "$ref" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec" }, "daily_median_anomaly" : { "description" : "Verifies that the median in a column changes in a rate within a percentile boundary during the last 90 days.", - "$ref" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec" }, "daily_min_anomaly" : { "description" : "Detects new outliers, which are new minimum values, much below the last known minimum value. If the minimum value is constantly changing, detects outliers as the biggest change of the minimum value during the last 90 days.", - "$ref" : "#/components/schemas/ColumnMinAnomalyDifferencingCheckSpec", - "originalRef" : "#/components/schemas/ColumnMinAnomalyDifferencingCheckSpec" + "originalRef" : "#/components/schemas/ColumnMinAnomalyDifferencingCheckSpec", + "$ref" : "#/components/schemas/ColumnMinAnomalyDifferencingCheckSpec" }, "daily_max_anomaly" : { "description" : "Detects new outliers, which are new maximum values, much above the last known maximum value. 
If the maximum value is constantly changing, detects outliers as the biggest change of the maximum value during the last 90 days.", - "$ref" : "#/components/schemas/ColumnMaxAnomalyDifferencingCheckSpec", - "originalRef" : "#/components/schemas/ColumnMaxAnomalyDifferencingCheckSpec" + "originalRef" : "#/components/schemas/ColumnMaxAnomalyDifferencingCheckSpec", + "$ref" : "#/components/schemas/ColumnMaxAnomalyDifferencingCheckSpec" }, "daily_mean_change" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnMeanChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChangeCheckSpec" }, "daily_median_change" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnMedianChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChangeCheckSpec" }, "daily_sum_change" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnSumChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChangeCheckSpec" }, "daily_mean_change_1_day" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnMeanChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChange1DayCheckSpec" }, "daily_mean_change_7_days" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since last readout from last week.", - "$ref" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec" }, "daily_mean_change_30_days" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since last readout from last month.", - "$ref" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec" }, "daily_median_change_1_day" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnMedianChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChange1DayCheckSpec" }, "daily_median_change_7_days" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout from the last week.", - "$ref" : "#/components/schemas/ColumnMedianChange7DaysCheckSpec", - 
"originalRef" : "#/components/schemas/ColumnMedianChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChange7DaysCheckSpec" }, "daily_median_change_30_days" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec" }, "daily_sum_change_1_day" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnSumChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChange1DayCheckSpec" }, "daily_sum_change_7_days" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout from the last week.", - "$ref" : "#/components/schemas/ColumnSumChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChange7DaysCheckSpec" }, "daily_sum_change_30_days" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnSumChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChange30DaysCheckSpec" } } } @@ -32934,94 +32934,94 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_sum_anomaly" : { "description" : "Verifies that the sum in a column is within a percentile from measurements made during the last 90 days. Calculates the sum of each daily partition and detect anomalies between daily partitions.", - "$ref" : "#/components/schemas/ColumnSumAnomalyStationaryPartitionCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumAnomalyStationaryPartitionCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumAnomalyStationaryPartitionCheckSpec", + "$ref" : "#/components/schemas/ColumnSumAnomalyStationaryPartitionCheckSpec" }, "daily_partition_mean_anomaly" : { "description" : "Verifies that the mean value in a column is within a percentile from measurements made during the last 90 days. 
Calculates the mean (average) of each daily partition and detect anomalies between daily partitions.", - "$ref" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec" }, "daily_partition_median_anomaly" : { "description" : "Verifies that the median in a column is within a percentile from measurements made during the last 90 days. Calculates the median of each daily partition and detect anomalies between daily partitions.", - "$ref" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec" }, "daily_partition_min_anomaly" : { "description" : "Detects new outliers, which are new minimum values, much below the last known minimum value. If the minimum value is constantly changing, detects outliers as the biggest change of the minimum value during the last 90 days. Finds the minimum value of each daily partition and detect anomalies between daily partitions.", - "$ref" : "#/components/schemas/ColumnMinAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnMinAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnMinAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnMinAnomalyStationaryCheckSpec" }, "daily_partition_max_anomaly" : { "description" : "Detects new outliers, which are new maximum values, much above the last known maximum value. If the maximum value is constantly changing, detects outliers as the biggest change of the maximum value during the last 90 days. 
Finds the maximum value of each daily partition and detect anomalies between daily partitions.", - "$ref" : "#/components/schemas/ColumnMaxAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnMaxAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnMaxAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnMaxAnomalyStationaryCheckSpec" }, "daily_partition_mean_change" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since last readout.", - "$ref" : "#/components/schemas/ColumnMeanChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChangeCheckSpec" }, "daily_partition_median_change" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnMedianChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChangeCheckSpec" }, "daily_partition_sum_change" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnSumChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChangeCheckSpec" }, "daily_partition_mean_change_1_day" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnMeanChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChange1DayCheckSpec" }, "daily_partition_mean_change_7_days" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since the last readout from the last week.", - "$ref" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec" }, "daily_partition_mean_change_30_days" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec" }, "daily_partition_median_change_1_day" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnMedianChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChange1DayCheckSpec" }, "daily_partition_median_change_7_days" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout from the last week.", - "$ref" : 
"#/components/schemas/ColumnMedianChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChange7DaysCheckSpec" }, "daily_partition_median_change_30_days" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec" }, "daily_partition_sum_change_1_day" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnSumChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChange1DayCheckSpec" }, "daily_partition_sum_change_7_days" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout from the last week.", - "$ref" : "#/components/schemas/ColumnSumChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChange7DaysCheckSpec" }, "daily_partition_sum_change_30_days" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnSumChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChange30DaysCheckSpec" } } } @@ -33140,94 +33140,94 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_sum_anomaly" : { "description" : "Verifies that the sum in a column changes in a rate within a percentile boundary during the last 90 days.", - "$ref" : "#/components/schemas/ColumnSumAnomalyDifferencingCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumAnomalyDifferencingCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumAnomalyDifferencingCheckSpec", + "$ref" : "#/components/schemas/ColumnSumAnomalyDifferencingCheckSpec" }, "profile_mean_anomaly" : { "description" : "Verifies that the mean value in a column changes in a rate within a percentile boundary during the last 90 days.", - "$ref" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanAnomalyStationaryCheckSpec" }, "profile_median_anomaly" : { "description" : "Verifies that the median in a column changes in a rate within a percentile boundary during the last 90 days.", - "$ref" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianAnomalyStationaryCheckSpec" }, "profile_min_anomaly" : { "description" : "Detects new outliers, which are new minimum values, much below the last known minimum value. If the minimum value is constantly changing, detects outliers as the biggest change of the minimum value during the last 90 days.", - "$ref" : "#/components/schemas/ColumnMinAnomalyDifferencingCheckSpec", - "originalRef" : "#/components/schemas/ColumnMinAnomalyDifferencingCheckSpec" + "originalRef" : "#/components/schemas/ColumnMinAnomalyDifferencingCheckSpec", + "$ref" : "#/components/schemas/ColumnMinAnomalyDifferencingCheckSpec" }, "profile_max_anomaly" : { "description" : "Detects new outliers, which are new maximum values, much above the last known maximum value. 
If the maximum value is constantly changing, detects outliers as the biggest change of the maximum value during the last 90 days.", - "$ref" : "#/components/schemas/ColumnMaxAnomalyDifferencingCheckSpec", - "originalRef" : "#/components/schemas/ColumnMaxAnomalyDifferencingCheckSpec" + "originalRef" : "#/components/schemas/ColumnMaxAnomalyDifferencingCheckSpec", + "$ref" : "#/components/schemas/ColumnMaxAnomalyDifferencingCheckSpec" }, "profile_mean_change" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnMeanChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChangeCheckSpec" }, "profile_median_change" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnMedianChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChangeCheckSpec" }, "profile_sum_change" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnSumChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChangeCheckSpec" }, "profile_mean_change_1_day" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnMeanChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChange1DayCheckSpec" }, "profile_mean_change_7_days" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since the last readout from the last week.", - "$ref" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChange7DaysCheckSpec" }, "profile_mean_change_30_days" : { "description" : "Verifies that the mean value in a column changed in a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanChange30DaysCheckSpec" }, "profile_median_change_1_day" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnMedianChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChange1DayCheckSpec" }, "profile_median_change_7_days" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout from the last week.", - "$ref" : 
"#/components/schemas/ColumnMedianChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChange7DaysCheckSpec" }, "profile_median_change_30_days" : { "description" : "Verifies that the median in a column changed in a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianChange30DaysCheckSpec" }, "profile_sum_change_1_day" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnSumChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChange1DayCheckSpec" }, "profile_sum_change_7_days" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout from last week.", - "$ref" : "#/components/schemas/ColumnSumChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChange7DaysCheckSpec" }, "profile_sum_change_30_days" : { "description" : "Verifies that the sum in a column changed in a fixed rate since the last readout from last month.", - "$ref" : "#/components/schemas/ColumnSumChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnSumChange30DaysCheckSpec" } } } @@ -33271,19 +33271,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_true_percent" : { "description" : "Measures the percentage of **true** values in a boolean column and verifies that it is within the accepted range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTruePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTruePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTruePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTruePercentCheckSpec" }, "daily_false_percent" : { "description" : "Measures the percentage of **false** values in a boolean column and verifies that it is within the accepted range. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnFalsePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnFalsePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnFalsePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnFalsePercentCheckSpec" } } } @@ -33327,19 +33327,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_true_percent" : { "description" : "Measures the percentage of **true** values in a boolean column and verifies that it is within the accepted range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnTruePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTruePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTruePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTruePercentCheckSpec" }, "daily_partition_false_percent" : { "description" : "Measures the percentage of **false** values in a boolean column and verifies that it is within the accepted range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnFalsePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnFalsePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnFalsePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnFalsePercentCheckSpec" } } } @@ -33406,19 +33406,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_true_percent" : { "description" : "Measures the percentage of **true** values in a boolean column and verifies that it is within the accepted range. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTruePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTruePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTruePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTruePercentCheckSpec" }, "monthly_false_percent" : { "description" : "Measures the percentage of **false** values in a boolean column and verifies that it is within the accepted range. 
Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnFalsePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnFalsePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnFalsePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnFalsePercentCheckSpec" } } } @@ -33462,19 +33462,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_true_percent" : { "description" : "Measures the percentage of **true** values in a boolean column and verifies that it is within the accepted range. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnTruePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTruePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTruePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTruePercentCheckSpec" }, "monthly_partition_false_percent" : { "description" : "Measures the percentage of **false** values in a boolean column and verifies that it is within the accepted range. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnFalsePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnFalsePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnFalsePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnFalsePercentCheckSpec" } } } @@ -33518,19 +33518,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_true_percent" : { "description" : "Measures the percentage of **true** values in a boolean column and verifies that it is within the accepted range.", - "$ref" : "#/components/schemas/ColumnTruePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTruePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTruePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTruePercentCheckSpec" }, "profile_false_percent" : { "description" : "Measures the percentage of **false** values in a boolean column and verifies that it is within the accepted range.", - "$ref" : "#/components/schemas/ColumnFalsePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnFalsePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnFalsePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnFalsePercentCheckSpec" } } } @@ -33670,8 +33670,8 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "reference_column" : { @@ -33680,33 +33680,33 @@ }, "daily_sum_match" : { "description" : "Verifies that percentage of the difference between the sum of values in a tested column in a parent table and the sum of a values in a column in the reference table. The difference must be below defined percentage thresholds. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec" }, "daily_min_match" : { "description" : "Verifies that percentage of the difference between the minimum value in a tested column in a parent table and the minimum value in a column in the reference table. The difference must be below defined percentage thresholds. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec" }, "daily_max_match" : { "description" : "Verifies that percentage of the difference between the maximum value in a tested column in a parent table and the maximum value in a column in the reference table. The difference must be below defined percentage thresholds. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec" }, "daily_mean_match" : { "description" : "Verifies that percentage of the difference between the mean (average) value in a tested column in a parent table and the mean (average) value in a column in the reference table. The difference must be below defined percentage thresholds. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec" }, "daily_not_null_count_match" : { "description" : "Verifies that percentage of the difference between the count of not null values in a tested column in a parent table and the count of not null values in a column in the reference table. The difference must be below defined percentage thresholds. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec" }, "daily_null_count_match" : { "description" : "Verifies that percentage of the difference between the count of null values in a tested column in a parent table and the count of null values in a column in the reference table. The difference must be below defined percentage thresholds. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec" } } } @@ -33777,8 +33777,8 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "reference_column" : { @@ -33787,33 +33787,33 @@ }, "daily_partition_sum_match" : { "description" : "Verifies that percentage of the difference between the sum of values in a tested column in a parent table and the sum of a values in a column in the reference table. The difference must be below defined percentage thresholds. Compares each daily partition (each day of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec" }, "daily_partition_min_match" : { "description" : "Verifies that percentage of the difference between the minimum value in a tested column in a parent table and the minimum value in a column in the reference table. The difference must be below defined percentage thresholds. Compares each daily partition (each day of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec" }, "daily_partition_max_match" : { "description" : "Verifies that percentage of the difference between the maximum value in a tested column in a parent table and the maximum value in a column in the reference table. The difference must be below defined percentage thresholds. 
Compares each daily partition (each day of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec" }, "daily_partition_mean_match" : { "description" : "Verifies that percentage of the difference between the mean (average) value in a tested column in a parent table and the mean (average) value in a column in the reference table. The difference must be below defined percentage thresholds. Compares each daily partition (each day of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec" }, "daily_partition_not_null_count_match" : { "description" : "Verifies that percentage of the difference between the count of not null values in a tested column in a parent table and the count of not null values in a column in the reference table. The difference must be below defined percentage thresholds. Compares each daily partition (each day of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec" }, "daily_partition_null_count_match" : { "description" : "Verifies that percentage of the difference between the count of null values in a tested column in a parent table and the count of null values in a column in the reference table. The difference must be below defined percentage thresholds. Compares each daily partition (each day of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec" } } } @@ -33922,15 +33922,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -33967,23 +33967,23 @@ }, "parameters" : { "description" : "Max sensor parameters.", - "$ref" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec" }, "warning" : { "description" : "Warning level threshold to raise a data quality incident with a warning severity level when the maximum value in the column in the parent table and the maximum value in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Error level threshold to raise a data quality incident with an error severity level when the maximum value in the column in the parent table and the maximum value in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Fatal level threshold to raise a data quality incident with a fatal severity level when the maximum value in the column in the parent table and the maximum value in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -34092,15 +34092,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -34137,23 +34137,23 @@ }, "parameters" : { "description" : "Mean sensor parameters.", - "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" }, "warning" : { "description" : "Warning level threshold to raise a data quality incident with a warning severity level when the mean (average) of the values in the column in the parent table and the mean (average) of the values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Error level threshold to raise a data quality incident with an error severity level when the mean (average) of the values in the column in the parent table and the mean (average) of the values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Fatal level threshold to raise a data quality incident with a fatal severity level when the mean (average) of the values in the column in the parent table and the mean (average) of the values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -34262,15 +34262,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -34307,23 +34307,23 @@ }, "parameters" : { "description" : "Min sensor parameters.", - "$ref" : "#/components/schemas/ColumnNumericMinSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMinSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMinSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMinSensorParametersSpec" }, "warning" : { "description" : "Warning level threshold to raise a data quality incident with a warning severity level when the minimum value in the column in the parent table and the minimum value in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Error level threshold to raise a data quality incident with an error severity level when the minimum value in the column in the parent table and the minimum value in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Fatal level threshold to raise a data quality incident with a fatal severity level when the minimum value in the column in the parent table and the minimum value in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -34392,33 +34392,33 @@ }, "compare_min" : { "description" : "The column compare configuration for comparing the minimum value between the compared (tested) column and the reference column. Leave null when the measure is not compared.", - "$ref" : "#/components/schemas/CompareThresholdsModel", - "originalRef" : "#/components/schemas/CompareThresholdsModel" + "originalRef" : "#/components/schemas/CompareThresholdsModel", + "$ref" : "#/components/schemas/CompareThresholdsModel" }, "compare_max" : { "description" : "The column compare configuration for comparing the maximum value between the compared (tested) column and the reference column. 
Leave null when the measure is not compared.", - "$ref" : "#/components/schemas/CompareThresholdsModel", - "originalRef" : "#/components/schemas/CompareThresholdsModel" + "originalRef" : "#/components/schemas/CompareThresholdsModel", + "$ref" : "#/components/schemas/CompareThresholdsModel" }, "compare_sum" : { "description" : "The column compare configuration for comparing the sum of values between the compared (tested) column and the reference column. Leave null when the measure is not compared.", - "$ref" : "#/components/schemas/CompareThresholdsModel", - "originalRef" : "#/components/schemas/CompareThresholdsModel" + "originalRef" : "#/components/schemas/CompareThresholdsModel", + "$ref" : "#/components/schemas/CompareThresholdsModel" }, "compare_mean" : { "description" : "The column compare configuration for comparing the mean (average) value between the compared (tested) column and the reference column. Leave null when the measure is not compared.", - "$ref" : "#/components/schemas/CompareThresholdsModel", - "originalRef" : "#/components/schemas/CompareThresholdsModel" + "originalRef" : "#/components/schemas/CompareThresholdsModel", + "$ref" : "#/components/schemas/CompareThresholdsModel" }, "compare_null_count" : { "description" : "The column compare configuration for comparing the count of null values between the compared (tested) column and the reference column. Leave null when the measure is not compared.", - "$ref" : "#/components/schemas/CompareThresholdsModel", - "originalRef" : "#/components/schemas/CompareThresholdsModel" + "originalRef" : "#/components/schemas/CompareThresholdsModel", + "$ref" : "#/components/schemas/CompareThresholdsModel" }, "compare_not_null_count" : { "description" : "The column compare configuration for comparing the count of not null values between the compared (tested) column and the reference column. Leave null when the measure is not compared.", - "$ref" : "#/components/schemas/CompareThresholdsModel", - "originalRef" : "#/components/schemas/CompareThresholdsModel" + "originalRef" : "#/components/schemas/CompareThresholdsModel", + "$ref" : "#/components/schemas/CompareThresholdsModel" } }, "description" : "The column to column comparison model used to select which measures (min, max, sum, mean, null count, not null count) are compared for this column between the compared (tested) column and the reference column from the reference table." @@ -34490,8 +34490,8 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "reference_column" : { @@ -34500,33 +34500,33 @@ }, "monthly_sum_match" : { "description" : "Verifies that percentage of the difference between the sum of values in a tested column in a parent table and the sum of values in a column in the reference table. The difference must be below defined percentage thresholds. 
Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec" }, "monthly_min_match" : { "description" : "Verifies that percentage of the difference between the minimum value in a tested column in a parent table and the minimum value in a column in the reference table. The difference must be below defined percentage thresholds. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec" }, "monthly_max_match" : { "description" : "Verifies that percentage of the difference between the maximum value in a tested column in a parent table and the maximum value in a column in the reference table. The difference must be below defined percentage thresholds. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec" }, "monthly_mean_match" : { "description" : "Verifies that percentage of the difference between the mean (average) value in a tested column in a parent table and the mean (average) value in a column in the reference table. The difference must be below defined percentage thresholds. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec" }, "monthly_not_null_count_match" : { "description" : "Verifies that percentage of the difference between the count of not null values in a tested column in a parent table and the count of not null values in a column in the reference table. The difference must be below defined percentage thresholds. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec" }, "monthly_null_count_match" : { "description" : "Verifies that percentage of the difference between the count of null values in a tested column in a parent table and the count of null values in a column in the reference table. The difference must be below defined percentage thresholds. 
Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec" } } } @@ -34597,8 +34597,8 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "reference_column" : { @@ -34607,33 +34607,33 @@ }, "monthly_partition_sum_match" : { "description" : "Verifies that percentage of the difference between the sum of values in a tested column in a parent table and the sum of a values in a column in the reference table. The difference must be below defined percentage thresholds. Compares each monthly partition (each month of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec" }, "monthly_partition_min_match" : { "description" : "Verifies that percentage of the difference between the minimum value in a tested column in a parent table and the minimum value in a column in the reference table. The difference must be below defined percentage thresholds. Compares each monthly partition (each month of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec" }, "monthly_partition_max_match" : { "description" : "Verifies that percentage of the difference between the maximum value in a tested column in a parent table and the maximum value in a column in the reference table. The difference must be below defined percentage thresholds. Compares each monthly partition (each month of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec" }, "monthly_partition_mean_match" : { "description" : "Verifies that percentage of the difference between the mean (average) value in a tested column in a parent table and the mean (average) value in a column in the reference table. The difference must be below defined percentage thresholds. 
Compares each monthly partition (each month of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec" }, "monthly_partition_not_null_count_match" : { "description" : "Verifies that percentage of the difference between the count of not null values in a tested column in a parent table and the count of not null values in a column in the reference table. The difference must be below defined percentage thresholds. Compares each monthly partition (each month of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec" }, "monthly_partition_null_count_match" : { "description" : "Verifies that percentage of the difference between the count of null values in a tested column in a parent table and the count of null values in a column in the reference table. The difference must be below defined percentage thresholds. Compares each monthly partition (each month of data) between the compared table and the reference table (the source of truth).", - "$ref" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec" } } } @@ -34742,15 +34742,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -34787,23 +34787,23 @@ }, "parameters" : { "description" : "Not null count sensor parameters.", - "$ref" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec" }, "warning" : { "description" : "Warning level threshold to raise a data quality incident with a warning severity level when the count of not null values in the column in the parent table and the count of not null values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Error level threshold to raise a data quality incident with an error severity level when the count of not null values in the column in the parent table and the count of not null values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Fatal level threshold to raise a data quality incident with a fatal severity level when the count of not null values in the column in the parent table and the count of not null values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -34912,15 +34912,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -34957,23 +34957,23 @@ }, "parameters" : { "description" : "Null count sensor parameters.", - "$ref" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec" }, "warning" : { "description" : "Warning level threshold to raise a data quality incident with a warning severity level when the count of null values in the column in the parent table and the count of null values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Error level threshold to raise a data quality incident with an error severity level when the count of null values in the column in the parent table and the count of null values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Fatal level threshold to raise a data quality incident with a fatal severity level when the count of null values in the column in the parent table and the count of null values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -35044,8 +35044,8 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "reference_column" : { @@ -35054,33 +35054,33 @@ }, "profile_sum_match" : { "description" : "Verifies that percentage of the difference between the sum of values in a tested column in a parent table and the sum of values in a column in the reference table. The difference must be below defined percentage thresholds.", - "$ref" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonSumMatchCheckSpec" }, "profile_min_match" : { "description" : "Verifies that percentage of the difference between the minimum value in a tested column in a parent table and the minimum value in a column in the reference table. The difference must be below defined percentage thresholds.", - "$ref" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMinMatchCheckSpec" }, "profile_max_match" : { "description" : "Verifies that percentage of the difference between the maximum value in a tested column in a parent table and the maximum value in a column in the reference table. The difference must be below defined percentage thresholds.", - "$ref" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMaxMatchCheckSpec" }, "profile_mean_match" : { "description" : "Verifies that percentage of the difference between the mean (average) value in a tested column in a parent table and the mean (average) value in a column in the reference table. The difference must be below defined percentage thresholds.", - "$ref" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonMeanMatchCheckSpec" }, "profile_not_null_count_match" : { "description" : "Verifies that percentage of the difference between the count of not null values in a tested column in a parent table and the count of not null values in a column in the reference table. The difference must be below defined percentage thresholds.", - "$ref" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonNotNullCountMatchCheckSpec" }, "profile_null_count_match" : { "description" : "Verifies that percentage of the difference between the count of null values in a tested column in a parent table and the count of null values in a column in the reference table. 
The difference must be below defined percentage thresholds.", - "$ref" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec", - "originalRef" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec" + "originalRef" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec", + "$ref" : "#/components/schemas/ColumnComparisonNullCountMatchCheckSpec" } } } @@ -35189,15 +35189,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -35234,23 +35234,23 @@ }, "parameters" : { "description" : "Sum sensor parameters.", - "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" }, "warning" : { "description" : "Warning level threshold to raise a data quality incident with a warning severity level when the sum of the values in the column in the parent table and the sum of values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Error level threshold to raise a data quality incident with an error severity level when the sum of the values in the column in the parent table and the sum of values in the compared column (in the reference table) do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Fatal level threshold to raise a data quality incident with a fatal severity level when the sum of the values in the column in the parent table and the sum of values in the compared column (in the reference table) do not match. 
The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -35304,29 +35304,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_text_parsable_to_boolean_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a boolean value does not fall below the minimum accepted percentage, text values identified as boolean placeholders are: 0, 1, true, false, t, f, yes, no, y, n. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec" }, "daily_text_parsable_to_integer_percent" : { "description" : "Verifies that the percentage text values that are parsable to an integer value in a column does not fall below the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec" }, "daily_text_parsable_to_float_percent" : { "description" : "Verifies that the percentage text values that are parsable to a float value in a column does not fall below the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec" }, "daily_text_parsable_to_date_percent" : { "description" : "Verifies that the percentage text values that are parsable to a date value in a column does not fall below the minimum accepted percentage. DQOps uses a safe_cast when possible, otherwise the text is verified using a regular expression. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec" } } } @@ -35380,29 +35380,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_text_parsable_to_boolean_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a boolean value does not fall below the minimum accepted percentage, text values identified as boolean placeholders are: 0, 1, true, false, t, f, yes, no, y, n. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec" }, "daily_partition_text_parsable_to_integer_percent" : { "description" : "Verifies that the percentage text values that are parsable to an integer value in a column does not fall below the minimum accepted percentage. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec" }, "daily_partition_text_parsable_to_float_percent" : { "description" : "Verifies that the percentage text values that are parsable to a float value in a column does not fall below the minimum accepted percentage. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec" }, "daily_partition_text_parsable_to_date_percent" : { "description" : "Verifies that the percentage text values that are parsable to a date value in a column does not fall below the minimum accepted percentage. DQOps uses a safe_cast when possible, otherwise the text is verified using a regular expression. 
Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec" } } } @@ -35456,29 +35456,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_text_parsable_to_boolean_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a boolean value does not fall below the minimum accepted percentage, text values identified as boolean placeholders are: 0, 1, true, false, t, f, yes, no, y, n. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec" }, "monthly_text_parsable_to_integer_percent" : { "description" : "Verifies that the percentage of text values that are parsable to an integer value in a column does not fall below the minimum accepted percentage. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec" }, "monthly_text_parsable_to_float_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a float value in a column does not fall below the minimum accepted percentage. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec" }, "monthly_text_parsable_to_date_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a date value in a column does not fall below the minimum accepted percentage. DQOps uses a safe_cast when possible, otherwise the text is verified using a regular expression. 
Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec" } } } @@ -35532,29 +35532,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_text_parsable_to_boolean_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a boolean value does not fall below the minimum accepted percentage, text values identified as boolean placeholders are: 0, 1, true, false, t, f, yes, no, y, n. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec" }, "monthly_partition_text_parsable_to_integer_percent" : { "description" : "Verifies that the percentage of text values that are parsable to an integer value in a column does not fall below the minimum accepted percentage. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec" }, "monthly_partition_text_parsable_to_float_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a float value in a column does not fall below the minimum accepted percentage. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec" }, "monthly_partition_text_parsable_to_date_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a date value in a column does not fall below the minimum accepted percentage. DQOps uses a safe_cast when possible, otherwise the text is verified using a regular expression. 
Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec" } } } @@ -35608,29 +35608,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_text_parsable_to_boolean_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a boolean value does not fall below the minimum accepted percentage, text values identified as boolean placeholders are: 0, 1, true, false, t, f, yes, no, y, n.", - "$ref" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToBooleanPercentCheckSpec" }, "profile_text_parsable_to_integer_percent" : { "description" : "Verifies that the percentage of text values that are parsable to an integer value in a column does not fall below the minimum accepted percentage", - "$ref" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToIntegerPercentCheckSpec" }, "profile_text_parsable_to_float_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a float value in a column does not fall below the minimum accepted percentage", - "$ref" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToFloatPercentCheckSpec" }, "profile_text_parsable_to_date_percent" : { "description" : "Verifies that the percentage of text values that are parsable to a date value in a column does not fall below the minimum accepted percentage. DQOps uses a safe_cast when possible, otherwise the text is verified using a regular expression", - "$ref" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextParsableToDatePercentCheckSpec" } } } @@ -35803,16 +35803,16 @@ "type" : "object", "description" : "The dictionary of statuses for data quality checks. 
The keys are data quality check names, and the values are the current data quality check statuses that describe the most current status.", "additionalProperties" : { - "$ref" : "#/definitions/CheckCurrentDataQualityStatusModel", - "originalRef" : "#/definitions/CheckCurrentDataQualityStatusModel" + "originalRef" : "#/definitions/CheckCurrentDataQualityStatusModel", + "$ref" : "#/definitions/CheckCurrentDataQualityStatusModel" } }, "dimensions" : { "type" : "object", "description" : "Dictionary of the current data quality statuses for each data quality dimension.", "additionalProperties" : { - "$ref" : "#/definitions/DimensionCurrentDataQualityStatusModel", - "originalRef" : "#/definitions/DimensionCurrentDataQualityStatusModel" + "originalRef" : "#/definitions/DimensionCurrentDataQualityStatusModel", + "$ref" : "#/definitions/DimensionCurrentDataQualityStatusModel" } } }, @@ -35868,29 +35868,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_sql_condition_failed_on_column" : { "description" : "Verifies that a custom SQL expression is met for each row. Counts the number of rows where the expression is not satisfied, and raises an issue if too many failures were detected. This check is also used to compare values between the current column and another column: `{alias}.{column} > col_tax`. Stores the most recent captured count of failed rows for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec" }, "daily_sql_condition_passed_percent_on_column" : { "description" : "Verifies that a minimum percentage of rows passed a custom SQL condition (expression). Reference the current column by using tokens, for example: `{alias}.{column} > {alias}.col_tax`. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec" }, "daily_sql_aggregate_expression_on_column" : { "description" : "Verifies that a custom aggregated SQL expression (MIN, MAX, etc.) is not outside the expected range. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec" }, "daily_import_custom_result_on_column" : { "description" : "Runs a custom query that retrieves a result of a data quality check performed in the data engineering pipeline, whose result (the severity level) is pulled from a separate table.", - "$ref" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec" } } } @@ -35944,29 +35944,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_sql_condition_failed_on_column" : { "description" : "Verifies that a custom SQL expression is met for each row. Counts the number of rows where the expression is not satisfied, and raises an issue if too many failures were detected. This check is also used to compare values between the current column and another column: `{alias}.{column} > {alias}.col_tax`. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec" }, "daily_partition_sql_condition_passed_percent_on_column" : { "description" : "Verifies that a minimum percentage of rows passed a custom SQL condition (expression). Reference the current column by using tokens, for example: `{alias}.{column} > {alias}.col_tax`. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec" }, "daily_partition_sql_aggregate_expression_on_column" : { "description" : "Verifies that a custom aggregated SQL expression (MIN, MAX, etc.) 
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec" }, "daily_partition_import_custom_result_on_column" : { "description" : "Runs a custom query that retrieves a result of a data quality check performed in the data engineering pipeline, whose result (the severity level) is pulled from a separate table.", - "$ref" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec" } } } @@ -36020,29 +36020,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_sql_condition_failed_on_column" : { "description" : "Verifies that a custom SQL expression is met for each row. Counts the number of rows where the expression is not satisfied, and raises an issue if too many failures were detected. This check is also used to compare values between the current column and another column: `{alias}.{column} > {alias}.col_tax`. Stores the most recent captured count of failed rows for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec" }, "monthly_sql_condition_passed_percent_on_column" : { "description" : "Verifies that a minimum percentage of rows passed a custom SQL condition (expression). Reference the current column by using tokens, for example: `{alias}.{column} > {alias}.col_tax`. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec" }, "monthly_sql_aggregate_expression_on_column" : { "description" : "Verifies that a custom aggregated SQL expression (MIN, MAX, etc.) is not outside the expected range. 
Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec" }, "monthly_import_custom_result_on_column" : { "description" : "Runs a custom query that retrieves a result of a data quality check performed in the data engineering pipeline, whose result (the severity level) is pulled from a separate table.", - "$ref" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec" } } } @@ -36096,29 +36096,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_sql_condition_failed_on_column" : { "description" : "Verifies that a custom SQL expression is met for each row. Counts the number of rows where the expression is not satisfied, and raises an issue if too many failures were detected. This check is also used to compare values between the current column and another column: `{alias}.{column} > {alias}.col_tax`. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec" }, "monthly_partition_sql_condition_passed_percent_on_column" : { "description" : "Verifies that a minimum percentage of rows passed a custom SQL condition (expression). Reference the current column by using tokens, for example: `{alias}.{column} > {alias}.col_tax`. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec" }, "monthly_partition_sql_aggregate_expression_on_column" : { "description" : "Verifies that a custom aggregated SQL expression (MIN, MAX, etc.) 
Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec" }, "monthly_partition_import_custom_result_on_column" : { "description" : "Runs a custom query that retrieves a result of a data quality check performed in the data engineering pipeline, whose result (the severity level) is pulled from a separate table.", - "$ref" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec" } } } @@ -36172,29 +36172,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_sql_condition_failed_on_column" : { "description" : "Verifies that a custom SQL expression is met for each row. Counts the number of rows where the expression is not satisfied, and raises an issue if too many failures were detected. This check is also used to compare values between the current column and another column: `{alias}.{column} > col_tax`.", - "$ref" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionFailedCheckSpec" }, "profile_sql_condition_passed_percent_on_column" : { "description" : "Verifies that a minimum percentage of rows passed a custom SQL condition (expression). Reference the current column by using tokens, for example: `{alias}.{column} > {alias}.col_tax`.", - "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentCheckSpec" }, "profile_sql_aggregate_expression_on_column" : { "description" : "Verifies that a custom aggregated SQL expression (MIN, MAX, etc.) 
is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlAggregateExpressionCheckSpec" }, "profile_import_custom_result_on_column" : { "description" : "Runs a custom query that retrieves a result of a data quality check performed in the data engineering pipeline, whose result (the severity level) is pulled from a separate table.", - "$ref" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec", - "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec" + "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec", + "$ref" : "#/components/schemas/ColumnSqlImportCustomResultCheckSpec" } } } @@ -36329,101 +36329,101 @@ "type" : "object", "description" : "Dictionary of custom checks. The keys are check names within this category.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "nulls" : { "description" : "Daily monitoring checks of nulls in the column", - "$ref" : "#/components/schemas/ColumnNullsDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnNullsDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnNullsDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnNullsDailyMonitoringChecksSpec" }, "uniqueness" : { "description" : "Daily monitoring checks of uniqueness in the column", - "$ref" : "#/components/schemas/ColumnUniquenessDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDailyMonitoringChecksSpec" }, "accepted_values" : { "description" : "Configuration of accepted values checks on a column level", - "$ref" : "#/components/schemas/ColumnAcceptedValuesDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnAcceptedValuesDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnAcceptedValuesDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnAcceptedValuesDailyMonitoringChecksSpec" }, "text" : { "description" : "Daily monitoring checks of text values in the column", - "$ref" : "#/components/schemas/ColumnTextDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnTextDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnTextDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnTextDailyMonitoringChecksSpec" }, "whitespace" : { "description" : "Configuration of column level checks that detect blank and whitespace values", - "$ref" : "#/components/schemas/ColumnWhitespaceDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceDailyMonitoringChecksSpec" }, "conversions" : { "description" : "Configuration of conversion testing checks on a column level.", - "$ref" : "#/components/schemas/ColumnConversionsDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnConversionsDailyMonitoringChecksSpec" 
+ "originalRef" : "#/components/schemas/ColumnConversionsDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnConversionsDailyMonitoringChecksSpec" }, "patterns" : { "description" : "Daily monitoring checks of pattern matching on a column level", - "$ref" : "#/components/schemas/ColumnPatternsDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnPatternsDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnPatternsDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnPatternsDailyMonitoringChecksSpec" }, "pii" : { "description" : "Daily monitoring checks of Personal Identifiable Information (PII) in the column", - "$ref" : "#/components/schemas/ColumnPiiDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnPiiDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnPiiDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnPiiDailyMonitoringChecksSpec" }, "numeric" : { "description" : "Daily monitoring checks of numeric values in the column", - "$ref" : "#/components/schemas/ColumnNumericDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnNumericDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnNumericDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnNumericDailyMonitoringChecksSpec" }, "anomaly" : { "description" : "Daily monitoring checks of anomalies in numeric columns", - "$ref" : "#/components/schemas/ColumnAnomalyDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnAnomalyDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnAnomalyDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnAnomalyDailyMonitoringChecksSpec" }, "datetime" : { "description" : "Daily monitoring checks of datetime in the column", - "$ref" : "#/components/schemas/ColumnDatetimeDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnDatetimeDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnDatetimeDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnDatetimeDailyMonitoringChecksSpec" }, "bool" : { "description" : "Daily monitoring checks of booleans in the column", - "$ref" : "#/components/schemas/ColumnBoolDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnBoolDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnBoolDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnBoolDailyMonitoringChecksSpec" }, "integrity" : { "description" : "Daily monitoring checks of integrity in the column", - "$ref" : "#/components/schemas/ColumnIntegrityDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnIntegrityDailyMonitoringChecksSpec" }, "accuracy" : { "description" : "Daily monitoring checks of accuracy in the column", - "$ref" : "#/components/schemas/ColumnAccuracyDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnAccuracyDailyMonitoringChecksSpec" }, "custom_sql" : { "description" : "Daily monitoring checks of custom SQL checks in the column", - "$ref" : "#/components/schemas/ColumnCustomSqlDailyMonitoringChecksSpec", - "originalRef" : 
"#/components/schemas/ColumnCustomSqlDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnCustomSqlDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnCustomSqlDailyMonitoringChecksSpec" }, "datatype" : { "description" : "Daily monitoring checks of datatype in the column", - "$ref" : "#/components/schemas/ColumnDatatypeDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnDatatypeDailyMonitoringChecksSpec" }, "schema" : { "description" : "Daily monitoring column schema checks", - "$ref" : "#/components/schemas/ColumnSchemaDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnSchemaDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnSchemaDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnSchemaDailyMonitoringChecksSpec" }, "comparisons" : { "type" : "object", "description" : "Dictionary of configuration of checks for table comparisons at a column level. The key that identifies each comparison must match the name of a data comparison that is configured on the parent table.", "additionalProperties" : { - "$ref" : "#/definitions/ColumnComparisonDailyMonitoringChecksSpec", - "originalRef" : "#/definitions/ColumnComparisonDailyMonitoringChecksSpec" + "originalRef" : "#/definitions/ColumnComparisonDailyMonitoringChecksSpec", + "$ref" : "#/definitions/ColumnComparisonDailyMonitoringChecksSpec" } } } @@ -36549,91 +36549,91 @@ "type" : "object", "description" : "Dictionary of custom checks. The keys are check names within this category.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "nulls" : { "description" : "Daily partitioned checks of nulls in the column", - "$ref" : "#/components/schemas/ColumnNullsDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnNullsDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnNullsDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnNullsDailyPartitionedChecksSpec" }, "uniqueness" : { "description" : "Daily partitioned checks of uniqueness in the column", - "$ref" : "#/components/schemas/ColumnUniquenessDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDailyPartitionedChecksSpec" }, "accepted_values" : { "description" : "Configuration of accepted values checks on a column level", - "$ref" : "#/components/schemas/ColumnAcceptedValuesDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnAcceptedValuesDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnAcceptedValuesDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnAcceptedValuesDailyPartitionedChecksSpec" }, "text" : { "description" : "Daily partitioned checks of text values in the column", - "$ref" : "#/components/schemas/ColumnTextDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnTextDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnTextDailyPartitionedChecksSpec", + "$ref" : 
"#/components/schemas/ColumnTextDailyPartitionedChecksSpec" }, "whitespace" : { "description" : "Configuration of column level checks that detect blank and whitespace values", - "$ref" : "#/components/schemas/ColumnWhitespaceDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceDailyPartitionedChecksSpec" }, "conversions" : { "description" : "Configuration of conversion testing checks on a column level.", - "$ref" : "#/components/schemas/ColumnConversionsDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnConversionsDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnConversionsDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnConversionsDailyPartitionedChecksSpec" }, "patterns" : { "description" : "Daily partitioned pattern match checks on a column level", - "$ref" : "#/components/schemas/ColumnPatternsDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnPatternsDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnPatternsDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnPatternsDailyPartitionedChecksSpec" }, "pii" : { "description" : "Daily partitioned checks of Personal Identifiable Information (PII) in the column", - "$ref" : "#/components/schemas/ColumnPiiDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnPiiDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnPiiDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnPiiDailyPartitionedChecksSpec" }, "numeric" : { "description" : "Daily partitioned checks of numeric values in the column", - "$ref" : "#/components/schemas/ColumnNumericDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnNumericDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnNumericDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnNumericDailyPartitionedChecksSpec" }, "anomaly" : { "description" : "Daily partitioned checks for anomalies in numeric columns", - "$ref" : "#/components/schemas/ColumnAnomalyDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnAnomalyDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnAnomalyDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnAnomalyDailyPartitionedChecksSpec" }, "datetime" : { "description" : "Daily partitioned checks of datetime in the column", - "$ref" : "#/components/schemas/ColumnDatetimeDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnDatetimeDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnDatetimeDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnDatetimeDailyPartitionedChecksSpec" }, "bool" : { "description" : "Daily partitioned checks for booleans in the column", - "$ref" : "#/components/schemas/ColumnBoolDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnBoolDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnBoolDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnBoolDailyPartitionedChecksSpec" }, "integrity" : { "description" : "Daily partitioned checks for integrity in the column", - "$ref" : "#/components/schemas/ColumnIntegrityDailyPartitionedChecksSpec", - "originalRef" : 
"#/components/schemas/ColumnIntegrityDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnIntegrityDailyPartitionedChecksSpec" }, "custom_sql" : { "description" : "Daily partitioned checks using custom SQL expressions evaluated on the column", - "$ref" : "#/components/schemas/ColumnCustomSqlDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnCustomSqlDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnCustomSqlDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnCustomSqlDailyPartitionedChecksSpec" }, "datatype" : { "description" : "Daily partitioned checks for datatype in the column", - "$ref" : "#/components/schemas/ColumnDatatypeDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnDatatypeDailyPartitionedChecksSpec" }, "comparisons" : { "type" : "object", "description" : "Dictionary of configuration of checks for table comparisons at a column level. The key that identifies each comparison must match the name of a data comparison that is configured on the parent table.", "additionalProperties" : { - "$ref" : "#/definitions/ColumnComparisonDailyPartitionedChecksSpec", - "originalRef" : "#/definitions/ColumnComparisonDailyPartitionedChecksSpec" + "originalRef" : "#/definitions/ColumnComparisonDailyPartitionedChecksSpec", + "$ref" : "#/definitions/ColumnComparisonDailyPartitionedChecksSpec" } } } @@ -36678,19 +36678,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_detected_datatype_in_text" : { "description" : "Detects the data type of text values stored in the column. The sensor returns the code of the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. Raises a data quality issue when the detected data type does not match the expected data type. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec", - "originalRef" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec" + "originalRef" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec", + "$ref" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec" }, "daily_detected_datatype_in_text_changed" : { "description" : "Detects that the data type of texts stored in a text column has changed since the last verification. The sensor returns the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec", + "$ref" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec" } } } @@ -36734,19 +36734,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_detected_datatype_in_text" : { "description" : "Detects the data type of text values stored in the column. The sensor returns the code of the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. Raises a data quality issue when the detected data type does not match the expected data type. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec", - "originalRef" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec" + "originalRef" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec", + "$ref" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec" }, "daily_partition_detected_datatype_in_text_changed" : { "description" : "Detects that the data type of texts stored in a text column has changed when compared to an earlier not empty partition. The sensor returns the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec", + "$ref" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec" } } } @@ -36855,15 +36855,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -36900,23 +36900,23 @@ }, "parameters" : { "description" : "The sensor parameters for a sensor that returns a value that identifies the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types.", - "$ref" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check, detects that the data type of values stored in a column has changed since the last time it was evaluated or the data type in the current daily/monthly partition differs from the data type in the previous partition.", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level, detects that the data type of values stored in a column has changed since the last time it was evaluated or the data type in the current daily/monthly partition differs from the data type in the previous partition.", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem, detects that the data type of values stored in a column has changed since the last time it was evaluated or the data type in the current daily/monthly partition differs from the data type in the previous partition.", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" } } } @@ -36960,19 +36960,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_detected_datatype_in_text" : { "description" : "Detects the data type of text values stored in the column. The sensor returns the code of the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. Raises a data quality issue when the detected data type does not match the expected data type. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec", - "originalRef" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec" + "originalRef" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec", + "$ref" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec" }, "monthly_detected_datatype_in_text_changed" : { "description" : "Detects that the data type of texts stored in a text column has changed since the last verification. The sensor returns the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec", + "$ref" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec" } } } @@ -37016,19 +37016,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_detected_datatype_in_text" : { "description" : "Detects the data type of text values stored in the column. The sensor returns the code of the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. Raises a data quality issue when the detected data type does not match the expected data type. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec", - "originalRef" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec" + "originalRef" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec", + "$ref" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec" }, "monthly_partition_detected_datatype_in_text_changed" : { "description" : "Detects that the data type of texts stored in a text column has changed when compared to an earlier not empty partition. 
The sensor returns the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec", + "$ref" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec" } } } @@ -37072,19 +37072,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_detected_datatype_in_text" : { "description" : "Detects the data type of text values stored in the column. The sensor returns the code of the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types. Raises a data quality issue when the detected data type does not match the expected data type.", - "$ref" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec", - "originalRef" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec" + "originalRef" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec", + "$ref" : "#/components/schemas/ColumnDetectedDatatypeInTextCheckSpec" }, "profile_detected_datatype_in_text_changed" : { "description" : "Detects that the data type of texts stored in a text column has changed since the last verification. The sensor returns the detected data type of a column: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types.", - "$ref" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec", + "$ref" : "#/components/schemas/ColumnDatatypeDetectedDatatypeInTextChangedCheckSpec" } } } @@ -37216,15 +37216,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -37261,23 +37261,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnDateInRangePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnDateInRangePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnDateInRangePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnDateInRangePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of date values in the range defined by the user in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -37435,15 +37435,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -37480,23 +37480,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnDatetimeDateValuesInFuturePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnDatetimeDateValuesInFuturePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnDatetimeDateValuesInFuturePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnDatetimeDateValuesInFuturePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of date values in future in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -37545,24 +37545,24 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_date_values_in_future_percent" : { "description" : "Detects dates in the future in date, datetime and timestamp columns. Measures a percentage of dates in the future. Raises a data quality issue when too many future dates are found. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec" }, "daily_date_in_range_percent" : { "description" : "Verifies that the dates in date, datetime, or timestamp columns are within a reasonable range of dates. 
The default configuration detects fake dates such as 1900-01-01 and 2099-12-31. Measures the percentage of valid dates and raises a data quality issue when too many dates are found. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDateInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDateInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDateInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDateInRangePercentCheckSpec" }, "daily_text_match_date_format_percent" : { "description" : "Verifies that the values in text columns match one of the predefined date formats, such as an ISO 8601 date. Measures the percentage of valid date strings and raises a data quality issue when too many invalid date strings are found. Creates a separate data quality check (and an alert) for each daily monitoring.", - "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec" } } } @@ -37611,24 +37611,24 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_date_values_in_future_percent" : { "description" : "Detects dates in the future in date, datetime and timestamp columns. Measures a percentage of dates in the future. Raises a data quality issue when too many future dates are found. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec" }, "daily_partition_date_in_range_percent" : { "description" : "Verifies that the dates in date, datetime, or timestamp columns are within a reasonable range of dates. The default configuration detects fake dates such as 1900-01-01 and 2099-12-31. Measures the percentage of valid dates and raises a data quality issue when too many dates are found. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnDateInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDateInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDateInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDateInRangePercentCheckSpec" }, "daily_partition_text_match_date_format_percent" : { "description" : "Verifies that the values in text columns match one of the predefined date formats, such as an ISO 8601 date. Measures the percentage of valid date strings and raises a data quality issue when too many invalid date strings are found. 
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec" } } } @@ -37713,24 +37713,24 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_date_values_in_future_percent" : { "description" : "Detects dates in the future in date, datetime and timestamp columns. Measures a percentage of dates in the future. Raises a data quality issue when too many future dates are found. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec" }, "monthly_date_in_range_percent" : { "description" : "Verifies that the dates in date, datetime, or timestamp columns are within a reasonable range of dates. The default configuration detects fake dates such as 1900-01-01 and 2099-12-31. Measures the percentage of valid dates and raises a data quality issue when too many invalid dates are found. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDateInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDateInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDateInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDateInRangePercentCheckSpec" }, "monthly_text_match_date_format_percent" : { "description" : "Verifies that the values in text columns match one of the predefined date formats, such as an ISO 8601 date. Measures the percentage of valid date strings and raises a data quality issue when too many invalid date strings are found. Creates a separate data quality check (and an alert) for each monthly monitoring.", - "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec" } } } @@ -37779,24 +37779,24 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section.
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_date_values_in_future_percent" : { "description" : "Detects dates in the future in date, datetime and timestamp columns. Measures a percentage of dates in the future. Raises a data quality issue when too many future dates are found. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec" }, "monthly_partition_date_in_range_percent" : { "description" : "Verifies that the dates in date, datetime, or timestamp columns are within a reasonable range of dates. The default configuration detects fake dates such as 1900-01-01 and 2099-12-31. Measures the percentage of valid dates and raises a data quality issue when too many invalid dates are found. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnDateInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDateInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDateInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDateInRangePercentCheckSpec" }, "monthly_partition_text_match_date_format_percent" : { "description" : "Verifies that the values in text columns match one of the predefined date formats, such as an ISO 8601 date. Measures the percentage of valid date strings and raises a data quality issue when too many invalid date strings are found. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec" } } } @@ -37845,24 +37845,24 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_date_values_in_future_percent" : { "description" : "Detects dates in the future in date, datetime and timestamp columns. Measures a percentage of dates in the future.
Raises a data quality issue when too many future dates are found.", - "$ref" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDateValuesInFuturePercentCheckSpec" }, "profile_date_in_range_percent" : { "description" : "Verifies that the dates in date, datetime, or timestamp columns are within a reasonable range of dates. The default configuration detects fake dates such as 1900-01-01 and 2099-12-31. Measures the percentage of valid dates and raises a data quality issue when too many invalid dates are found.", - "$ref" : "#/components/schemas/ColumnDateInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDateInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDateInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDateInRangePercentCheckSpec" }, "profile_text_match_date_format_percent" : { "description" : "Verifies that the values in text columns match one of the predefined date formats, such as an ISO 8601 date. Measures the percentage of valid date strings and raises a data quality issue when too many invalid date strings are found.", - "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentCheckSpec" } } } @@ -37971,15 +37971,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -38016,23 +38016,23 @@ }, "parameters" : { "description" : "The sensor parameters for a sensor that returns a value that identifies the detected type of column data: 1 - integers, 2 - floats, 3 - dates, 4 - datetimes, 5 - timestamps, 6 - booleans, 7 - strings, 8 - mixed data types.", - "$ref" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check, detects that the data type of values stored in a column matches an expected data type code (1..8).", - "$ref" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec", - "originalRef" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec" + "originalRef" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec", + "$ref" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level, detects that the data type of values stored in a column matches an expected data type code (1..8).", - "$ref" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec", - "originalRef" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec" + "originalRef" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec", + "$ref" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem, detects that the data type of values stored in a column matches an expected data type code (1..8).", - "$ref" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec", - "originalRef" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec" + "originalRef" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec", + "$ref" : "#/components/schemas/DetectedDatatypeEqualsRuleParametersSpec" } } } @@ -38141,15 +38141,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -38186,23 +38186,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for an anomaly in the count of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec" } } } @@ -38311,15 +38311,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -38356,23 +38356,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyStationaryCountValuesRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryCountValuesRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryCountValuesRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryCountValuesRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for an anomaly in the count of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyStationaryCountValuesRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryCountValuesRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryCountValuesRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryCountValuesRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyStationaryCountValuesRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryCountValuesRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryCountValuesRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryCountValuesRuleFatal01PctParametersSpec" } } } @@ -38481,15 +38481,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -38526,23 +38526,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for the maximum accepted change in the count of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" } } } @@ -38651,15 +38651,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -38696,23 +38696,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for the maximum accepted change in the count of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" } } } @@ -38821,15 +38821,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -38866,23 +38866,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for the maximum accepted change in the count of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" } } } @@ -38991,15 +38991,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -39036,23 +39036,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for the maximum accepted change in the count of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec" } } } @@ -39161,15 +39161,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -39206,23 +39206,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/CountBetweenRuleParametersSpec", - "originalRef" : "#/components/schemas/CountBetweenRuleParametersSpec" + "originalRef" : "#/components/schemas/CountBetweenRuleParametersSpec", + "$ref" : "#/components/schemas/CountBetweenRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for the expected range of the count of distinct values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/CountBetweenRuleParametersSpec", - "originalRef" : "#/components/schemas/CountBetweenRuleParametersSpec" + "originalRef" : "#/components/schemas/CountBetweenRuleParametersSpec", + "$ref" : "#/components/schemas/CountBetweenRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/CountBetweenRuleParametersSpec", - "originalRef" : "#/components/schemas/CountBetweenRuleParametersSpec" + "originalRef" : "#/components/schemas/CountBetweenRuleParametersSpec", + "$ref" : "#/components/schemas/CountBetweenRuleParametersSpec" } } } @@ -39331,15 +39331,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -39376,23 +39376,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for an anomaly in the percentage of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleFatal01PctParametersSpec" } } } @@ -39501,15 +39501,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -39546,23 +39546,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for the maximum accepted change in the percentage of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" } } } @@ -39671,15 +39671,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -39716,23 +39716,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for the maximum accepted change in the percentage of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" } } } @@ -39841,15 +39841,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -39886,23 +39886,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for the maximum accepted change in the percentage of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" } } } @@ -40011,15 +40011,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -40056,23 +40056,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for the maximum accepted change in the percentage of distinct values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec" } } } @@ -40181,15 +40181,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -40226,23 +40226,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows with unique value in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec" } } } @@ -40351,15 +40351,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -40396,23 +40396,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDuplicateCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDuplicateCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDuplicateCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDuplicateCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of duplicate values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -40521,15 +40521,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -40566,23 +40566,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDuplicatePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDuplicatePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDuplicatePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDuplicatePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of duplicate values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -40691,15 +40691,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -40736,23 +40736,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinCountRuleConstant1ParametersSpec", - "originalRef" : "#/components/schemas/MinCountRuleConstant1ParametersSpec" + "originalRef" : "#/components/schemas/MinCountRuleConstant1ParametersSpec", + "$ref" : "#/components/schemas/MinCountRuleConstant1ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with not null values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinCountRuleConstant1ParametersSpec", - "originalRef" : "#/components/schemas/MinCountRuleConstant1ParametersSpec" + "originalRef" : "#/components/schemas/MinCountRuleConstant1ParametersSpec", + "$ref" : "#/components/schemas/MinCountRuleConstant1ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinCountRuleConstant1ParametersSpec", - "originalRef" : "#/components/schemas/MinCountRuleConstant1ParametersSpec" + "originalRef" : "#/components/schemas/MinCountRuleConstant1ParametersSpec", + "$ref" : "#/components/schemas/MinCountRuleConstant1ParametersSpec" } } } @@ -40861,15 +40861,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -40906,23 +40906,23 @@ }, "parameters" : { "description" : "Data quality check parameters that specify a list of expected numeric values that must be present in the column.", - "$ref" : "#/components/schemas/ColumnNumericExpectedNumbersInUseCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericExpectedNumbersInUseCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericExpectedNumbersInUseCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericExpectedNumbersInUseCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning when too many expected values were not found in the column.", - "$ref" : "#/components/schemas/MaxMissingRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxMissingRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxMissingRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxMissingRule0WarningParametersSpec" }, "error" : { "description" : "Alerting threshold that raises a data quality error when too many expected values were not found in the column.", - "$ref" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a data quality fatal issue when too many expected values were not found in the column.", - "$ref" : "#/components/schemas/MaxMissingRule2ParametersSpec", - "originalRef" : "#/components/schemas/MaxMissingRule2ParametersSpec" + "originalRef" : "#/components/schemas/MaxMissingRule2ParametersSpec", + "$ref" : "#/components/schemas/MaxMissingRule2ParametersSpec" } } } @@ -41031,15 +41031,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -41076,23 +41076,23 @@ }, "parameters" : { "description" : "Data quality check parameters that specify a list of expected text values that must be present in the column.", - "$ref" : "#/components/schemas/ColumnStringsExpectedTextValuesInUseCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnStringsExpectedTextValuesInUseCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnStringsExpectedTextValuesInUseCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnStringsExpectedTextValuesInUseCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning when too many expected values were not found in the column.", - "$ref" : "#/components/schemas/MaxMissingRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxMissingRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxMissingRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxMissingRule0WarningParametersSpec" }, "error" : { "description" : "Alerting threshold that raises a data quality error when too many expected values were not found in the column.", - "$ref" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a data quality fatal issue when too many expected values were not found in the column.", - "$ref" : "#/components/schemas/MaxMissingRule2ParametersSpec", - "originalRef" : "#/components/schemas/MaxMissingRule2ParametersSpec" + "originalRef" : "#/components/schemas/MaxMissingRule2ParametersSpec", + "$ref" : "#/components/schemas/MaxMissingRule2ParametersSpec" } } } @@ -41201,15 +41201,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -41246,23 +41246,23 @@ }, "parameters" : { "description" : "Data quality check parameters that specify a list of expected most popular text values that should be found in the column. 
The second parameter is 'top', which is the limit of the most popular column values to find in the tested column.", - "$ref" : "#/components/schemas/ColumnStringsExpectedTextsInTopValuesCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnStringsExpectedTextsInTopValuesCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnStringsExpectedTextsInTopValuesCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnStringsExpectedTextsInTopValuesCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning when too many expected values were not found among the TOP most popular values in the tested column.", - "$ref" : "#/components/schemas/MaxMissingRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxMissingRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxMissingRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxMissingRule0WarningParametersSpec" }, "error" : { "description" : "Alerting threshold that raises a data quality error when too many expected values were not found among the TOP most popular values in the tested column.", - "$ref" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxMissingRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a data quality fatal issue when too many expected values were not found among the TOP most popular values in the tested column.", - "$ref" : "#/components/schemas/MaxMissingRule2ParametersSpec", - "originalRef" : "#/components/schemas/MaxMissingRule2ParametersSpec" + "originalRef" : "#/components/schemas/MaxMissingRule2ParametersSpec", + "$ref" : "#/components/schemas/MaxMissingRule2ParametersSpec" } } } @@ -41371,15 +41371,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
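The 'top' parameter described above can be sketched in YAML as follows. The column values, expected values, and thresholds are hypothetical; the max_missing fields follow the MaxMissingRule specs referenced in this schema:

# hypothetical fragment of a column's profiling checks
profiling_checks:
  accepted_values:
    profile_expected_texts_in_top_values_count:
      parameters:
        expected_values: ["US", "UK", "DE"]  # values expected among the most popular entries
        top: 5                               # limit of most popular column values to scan
      warning:
        max_missing: 0                       # MaxMissingRule0WarningParametersSpec
      error:
        max_missing: 1                       # threshold values are hypothetical
      fatal:
        max_missing: 2                       # MaxMissingRule2ParametersSpec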
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -41416,23 +41416,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnBoolFalsePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnBoolFalsePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnBoolFalsePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnBoolFalsePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of false value in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec" } } } @@ -41541,15 +41541,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -41586,23 +41586,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericIntegerInRangePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericIntegerInRangePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericIntegerInRangePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericIntegerInRangePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of values from range in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -41646,19 +41646,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_lookup_key_not_found" : { "description" : "Detects invalid values that are not present in a dictionary table using an outer join query. Counts the number of invalid keys. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec" }, "daily_lookup_key_found_percent" : { "description" : "Measures the percentage of valid values that are present in a dictionary table. Joins this table to a dictionary table using an outer join. 
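To contrast the two percent-rule families above: the false-percent check accepts a band (BetweenPercentRuleParametersSpec, with both a lower and an upper bound), while integer-in-range-percent only enforces a lower bound (the MinPercentRule specs). A sketch, shown side by side for comparison although in practice the two checks would sit on different columns; all threshold values and the min_value/max_value parameter names are assumptions:

# hypothetical fragment of daily monitoring checks
monitoring_checks:
  daily:
    bool:
      daily_false_percent:
        warning:
          min_percent: 95.0     # BetweenPercentRuleParametersSpec: accepted band
          max_percent: 100.0
    numeric:
      daily_integer_in_range_percent:
        parameters:
          min_value: 0          # assumed parameter names for the accepted range
          max_value: 100
        error:
          min_percent: 100.0    # MinPercentRule100ErrorParametersSpec
        fatal:
          min_percent: 95.0     # MinPercentRule95ParametersSpec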
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec" } } } @@ -41702,19 +41702,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_lookup_key_not_found" : { "description" : "Detects invalid values that are not present in a dictionary table using an outer join query. Counts the number of invalid keys. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec" }, "daily_partition_lookup_key_found_percent" : { "description" : "Measures the percentage of valid values that are present in a dictionary table. Joins this table to a dictionary table using an outer join. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec" } } } @@ -41823,15 +41823,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -41868,23 +41868,23 @@ }, "parameters" : { "description" : "Data quality check parameters with the name of the foreign table and the column where the lookup is performed by running an outer join SQL query", - "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows with values matching values in another table column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -42083,15 +42083,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
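The foreign-key lookup parameters mentioned above (the name of the dictionary table and the column joined to it) are declared on the check itself. A sketch, assuming foreign_table and foreign_column are the parameter names of the outer-join sensor and using hypothetical table and column names:

# hypothetical fragment of a column's daily monitoring checks
monitoring_checks:
  daily:
    integrity:
      daily_lookup_key_found_percent:
        parameters:
          foreign_table: dim_country     # hypothetical dictionary table
          foreign_column: country_code   # hypothetical dictionary column
        warning:
          min_percent: 100.0   # MinPercentRule100WarningParametersSpec
        fatal:
          min_percent: 95.0    # MinPercentRule95ParametersSpec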
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -42128,23 +42128,23 @@ }, "parameters" : { "description" : "Data quality check parameters with the name of the foreign table and the column where the lookup is performed by running an outer join SQL query", - "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyNotMatchCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyNotMatchCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyNotMatchCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyNotMatchCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with values not matching values in another table column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -42188,19 +42188,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_lookup_key_not_found" : { "description" : "Detects invalid values that are not present in a dictionary table using an outer join query. Counts the number of invalid keys. 
Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec" }, "monthly_lookup_key_found_percent" : { "description" : "Measures the percentage of valid values that are present in a dictionary table. Joins this table to a dictionary table using an outer join. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec" } } } @@ -42244,19 +42244,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_lookup_key_not_found" : { "description" : "Detects invalid values that are not present in a dictionary table using an outer join query. Counts the number of invalid keys. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec" }, "monthly_partition_lookup_key_found_percent" : { "description" : "Measures the percentage of valid values that are present in a dictionary table. Joins this table to a dictionary table using an outer join. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec" } } } @@ -42300,19 +42300,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_lookup_key_not_found" : { "description" : "Detects invalid values that are not present in a dictionary table using an outer join query. 
Counts the number of invalid keys.", - "$ref" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegrityLookupKeyNotFoundCountCheckSpec" }, "profile_lookup_key_found_percent" : { "description" : "Measures the percentage of valid values that are present in a dictionary table. Joins this table to a dictionary table using an outer join.", - "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegrityForeignKeyMatchPercentCheckSpec" } } } @@ -42421,15 +42421,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -42466,23 +42466,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsInvalidEmailFormatCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsInvalidEmailFormatCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsInvalidEmailFormatCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsInvalidEmailFormatCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with invalid emails in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : 
"#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -42591,15 +42591,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -42636,23 +42636,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsInvalidEmailFormatPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsInvalidEmailFormatPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsInvalidEmailFormatPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsInvalidEmailFormatPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with empty strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -42761,15 +42761,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -42806,23 +42806,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsInvalidIp4AddressFormatCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsInvalidIp4AddressFormatCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsInvalidIp4AddressFormatCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsInvalidIp4AddressFormatCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with invalid IP4 address in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -42931,15 +42931,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -42976,23 +42976,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsInvalidIp6AddressFormatCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsInvalidIp6AddressFormatCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsInvalidIp6AddressFormatCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsInvalidIp6AddressFormatCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with invalid IP6 address in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -43101,15 +43101,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -43146,23 +43146,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericInvalidLatitudeCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericInvalidLatitudeCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericInvalidLatitudeCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericInvalidLatitudeCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with invalid latitude value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -43271,15 +43271,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -43316,23 +43316,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericInvalidLongitudeCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericInvalidLongitudeCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericInvalidLongitudeCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericInvalidLongitudeCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with invalid longitude value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -43441,15 +43441,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
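The count-based checks above (invalid latitude and longitude, invalid IP4 and IP6 address formats) all share the same MaxCountRule threshold shape; only the sensor differs. A sketch with hypothetical values:

# hypothetical fragment of a column's daily monitoring checks
monitoring_checks:
  daily:
    numeric:
      daily_invalid_latitude:
        warning:
          max_count: 0      # MaxCountRule0WarningParametersSpec: any invalid row raises a warning
        fatal:
          max_count: 100    # MaxCountRule100ParametersSpec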
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -43486,23 +43486,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsInvalidUsaPhoneFormatFoundSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsInvalidUsaPhoneFormatFoundSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsInvalidUsaPhoneFormatFoundSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsInvalidUsaPhoneFormatFoundSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with an invalid USA phone number format in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -43611,15 +43611,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -43656,23 +43656,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsInvalidUsaPhoneFormatPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsInvalidUsaPhoneFormatPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsInvalidUsaPhoneFormatPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsInvalidUsaPhoneFormatPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with an invalid USA phone number format in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -43781,15 +43781,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -43826,23 +43826,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsInvalidUsaZipcodeFormatFoundSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsInvalidUsaZipcodeFormatFoundSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsInvalidUsaZipcodeFormatFoundSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsInvalidUsaZipcodeFormatFoundSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with an invalid USA zip code format in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -43951,15 +43951,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -43996,23 +43996,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsInvalidUsaZipcodeFormatPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsInvalidUsaZipcodeFormatPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsInvalidUsaZipcodeFormatPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsInvalidUsaZipcodeFormatPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with an invalid USA zip code format in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -44121,15 +44121,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -44166,23 +44166,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsInvalidUuidFormatCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsInvalidUuidFormatCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsInvalidUuidFormatCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsInvalidUuidFormatCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with invalid uuid in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -44291,15 +44291,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -44336,23 +44336,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsInvalidUuidFormatPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsInvalidUuidFormatPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsInvalidUuidFormatPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsInvalidUuidFormatPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with an invalid UUID in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -44603,8 +44603,8 @@ }, "table" : { "description" : "Physical table name including the schema and table names.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "column_name" : { "type" : "string", @@ -44652,43 +44652,43 @@ }, "type_snapshot" : { "description" : "Column data type that was retrieved when the table metadata was imported.", - "$ref" : "#/components/schemas/ColumnTypeSnapshotSpec", - "originalRef" : "#/components/schemas/ColumnTypeSnapshotSpec" + "originalRef" : "#/components/schemas/ColumnTypeSnapshotSpec", + "$ref" : "#/components/schemas/ColumnTypeSnapshotSpec" }, "data_quality_status" : { "description" : "The current data quality status for the column, grouped by data quality dimensions. DQOps may return a null value when the results were not yet loaded into the cache. 
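The percent variants of the pattern checks, such as the invalid UUID percent check above, swap max_count for max_percent while keeping the same warning/error/fatal ladder. A sketch with hypothetical thresholds:

# hypothetical fragment of a column's daily monitoring checks
monitoring_checks:
  daily:
    patterns:
      daily_invalid_uuid_format_percent:
        warning:
          max_percent: 0.0    # MaxPercentRule0WarningParametersSpec
        fatal:
          max_percent: 5.0    # MaxPercentRule5ParametersSpec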
In that case, the client should wait a few seconds and retry a call to get the most recent data quality status of the column.", - "$ref" : "#/components/schemas/ColumnCurrentDataQualityStatusModel", - "originalRef" : "#/components/schemas/ColumnCurrentDataQualityStatusModel" + "originalRef" : "#/components/schemas/ColumnCurrentDataQualityStatusModel", + "$ref" : "#/components/schemas/ColumnCurrentDataQualityStatusModel" }, "run_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run all checks within this column.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_profiling_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run profiling checks within this column.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_monitoring_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run monitoring checks within this column.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_partition_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run partition partitioned checks within this column.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "collect_statistics_job_template" : { "description" : "Configured parameters for the \"collect statistics\" job that should be pushed to the job queue in order to run all statistics collector within this column.", - "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters", - "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters" + "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters", + "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters" }, "data_clean_job_template" : { "description" : "Configured parameters for the \"data clean\" job that after being supplied with a time range should be pushed to the job queue in order to remove stored results connected with this column.", - "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters", - "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters" + "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters", + "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters" }, "advanced_properties" : { "type" : "object", @@ -44821,15 +44821,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. 
Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -44866,23 +44866,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec" } } } @@ -44991,15 +44991,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -45036,23 +45036,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec" } } } @@ -45161,15 +45161,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -45206,23 +45206,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMaxSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum values in range in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -45331,15 +45331,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -45376,23 +45376,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec" } } } @@ -45501,15 +45501,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -45546,23 +45546,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" } } } @@ -45671,15 +45671,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -45716,23 +45716,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" } } } @@ -45841,15 +45841,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -45886,23 +45886,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" } } } @@ -46011,15 +46011,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -46056,23 +46056,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec" } } } @@ -46181,15 +46181,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -46226,23 +46226,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a mean in range in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -46351,15 +46351,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -46396,23 +46396,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec" } } } @@ -46521,15 +46521,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -46566,23 +46566,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" } } } @@ -46691,15 +46691,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -46736,23 +46736,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" } } } @@ -46861,15 +46861,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -46906,23 +46906,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" } } } @@ -47031,15 +47031,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -47076,23 +47076,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec" } } } @@ -47201,15 +47201,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -47246,23 +47246,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a median in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -47371,15 +47371,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -47416,23 +47416,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMinSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMinSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMinSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMinSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec" } } } @@ -47541,15 +47541,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -47586,23 +47586,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMinSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMinSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMinSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMinSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec" } } } @@ -47711,15 +47711,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -47756,23 +47756,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericMinSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericMinSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericMinSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericMinSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum values in range in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -47839,8 +47839,8 @@ }, "table" : { "description" : "Physical table name including the schema and table names.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "column_name" : { "type" : "string", @@ -47853,8 +47853,8 @@ }, "spec" : { "description" : "Full column specification.", - "$ref" : "#/components/schemas/ColumnSpec", - "originalRef" : "#/components/schemas/ColumnSpec" + "originalRef" : "#/components/schemas/ColumnSpec", + "$ref" : "#/components/schemas/ColumnSpec" }, "can_edit" : { "type" : "boolean", @@ -47889,13 +47889,13 @@ "properties" : { "daily" : { "description" : "Configuration of daily monitoring evaluated at a column level.", - "$ref" : "#/components/schemas/ColumnDailyMonitoringCheckCategoriesSpec", - "originalRef" : "#/components/schemas/ColumnDailyMonitoringCheckCategoriesSpec" + "originalRef" : "#/components/schemas/ColumnDailyMonitoringCheckCategoriesSpec", + "$ref" : "#/components/schemas/ColumnDailyMonitoringCheckCategoriesSpec" }, "monthly" : { "description" : "Configuration of monthly monitoring evaluated at a column level.", - "$ref" : "#/components/schemas/ColumnMonthlyMonitoringCheckCategoriesSpec", - "originalRef" : "#/components/schemas/ColumnMonthlyMonitoringCheckCategoriesSpec" + "originalRef" : 
"#/components/schemas/ColumnMonthlyMonitoringCheckCategoriesSpec", + "$ref" : "#/components/schemas/ColumnMonthlyMonitoringCheckCategoriesSpec" } } } @@ -48025,96 +48025,96 @@ "type" : "object", "description" : "Dictionary of custom checks. The keys are check names within this category.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "nulls" : { "description" : "Monthly monitoring checks of nulls in the column", - "$ref" : "#/components/schemas/ColumnNullsMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnNullsMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnNullsMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnNullsMonthlyMonitoringChecksSpec" }, "uniqueness" : { "description" : "Monthly monitoring checks of uniqueness in the column", - "$ref" : "#/components/schemas/ColumnUniquenessMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnUniquenessMonthlyMonitoringChecksSpec" }, "accepted_values" : { "description" : "Configuration of accepted values checks on a column level", - "$ref" : "#/components/schemas/ColumnAcceptedValuesMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnAcceptedValuesMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnAcceptedValuesMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnAcceptedValuesMonthlyMonitoringChecksSpec" }, "text" : { "description" : "Monthly monitoring checks of text values in the column", - "$ref" : "#/components/schemas/ColumnTextMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnTextMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnTextMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnTextMonthlyMonitoringChecksSpec" }, "whitespace" : { "description" : "Configuration of column level checks that detect blank and whitespace values", - "$ref" : "#/components/schemas/ColumnWhitespaceMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceMonthlyMonitoringChecksSpec" }, "conversions" : { "description" : "Configuration of conversion testing checks on a column level.", - "$ref" : "#/components/schemas/ColumnConversionsMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnConversionsMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnConversionsMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnConversionsMonthlyMonitoringChecksSpec" }, "patterns" : { "description" : "Monthly monitoring checks of pattern matching on a column level", - "$ref" : "#/components/schemas/ColumnPatternsMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnPatternsMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnPatternsMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnPatternsMonthlyMonitoringChecksSpec" }, "pii" : { "description" : "Monthly monitoring checks of Personal Identifiable Information (PII) 
in the column", - "$ref" : "#/components/schemas/ColumnPiiMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnPiiMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnPiiMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnPiiMonthlyMonitoringChecksSpec" }, "numeric" : { "description" : "Monthly monitoring checks of numeric values in the column", - "$ref" : "#/components/schemas/ColumnNumericMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnNumericMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnNumericMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnNumericMonthlyMonitoringChecksSpec" }, "datetime" : { "description" : "Monthly monitoring checks of datetime in the column", - "$ref" : "#/components/schemas/ColumnDatetimeMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnDatetimeMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnDatetimeMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnDatetimeMonthlyMonitoringChecksSpec" }, "bool" : { "description" : "Monthly monitoring checks of booleans in the column", - "$ref" : "#/components/schemas/ColumnBoolMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnBoolMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnBoolMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnBoolMonthlyMonitoringChecksSpec" }, "integrity" : { "description" : "Monthly monitoring checks of integrity in the column", - "$ref" : "#/components/schemas/ColumnIntegrityMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnIntegrityMonthlyMonitoringChecksSpec" }, "accuracy" : { "description" : "Monthly monitoring checks of accuracy in the column", - "$ref" : "#/components/schemas/ColumnAccuracyMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnAccuracyMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnAccuracyMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnAccuracyMonthlyMonitoringChecksSpec" }, "custom_sql" : { "description" : "Monthly monitoring checks of custom SQL checks in the column", - "$ref" : "#/components/schemas/ColumnCustomSqlMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnCustomSqlMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnCustomSqlMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnCustomSqlMonthlyMonitoringChecksSpec" }, "datatype" : { "description" : "Monthly monitoring checks of datatype in the column", - "$ref" : "#/components/schemas/ColumnDatatypeMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/ColumnDatatypeMonthlyMonitoringChecksSpec" }, "schema" : { "description" : "Monthly monitoring column schema checks", - "$ref" : "#/components/schemas/ColumnSchemaMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/ColumnSchemaMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/ColumnSchemaMonthlyMonitoringChecksSpec", + "$ref" : 
"#/components/schemas/ColumnSchemaMonthlyMonitoringChecksSpec" }, "comparisons" : { "type" : "object", "description" : "Dictionary of configuration of checks for table comparisons at a column level. The key that identifies each comparison must match the name of a data comparison that is configured on the parent table.", "additionalProperties" : { - "$ref" : "#/definitions/ColumnComparisonMonthlyMonitoringChecksSpec", - "originalRef" : "#/definitions/ColumnComparisonMonthlyMonitoringChecksSpec" + "originalRef" : "#/definitions/ColumnComparisonMonthlyMonitoringChecksSpec", + "$ref" : "#/definitions/ColumnComparisonMonthlyMonitoringChecksSpec" } } } @@ -48235,86 +48235,86 @@ "type" : "object", "description" : "Dictionary of custom checks. The keys are check names within this category.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "nulls" : { "description" : "Monthly partitioned checks of nulls in the column", - "$ref" : "#/components/schemas/ColumnNullsMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnNullsMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnNullsMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnNullsMonthlyPartitionedChecksSpec" }, "uniqueness" : { "description" : "Monthly partitioned checks of uniqueness in the column", - "$ref" : "#/components/schemas/ColumnUniquenessMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnUniquenessMonthlyPartitionedChecksSpec" }, "accepted_values" : { "description" : "Configuration of accepted values checks on a column level", - "$ref" : "#/components/schemas/ColumnAcceptedValuesMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnAcceptedValuesMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnAcceptedValuesMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnAcceptedValuesMonthlyPartitionedChecksSpec" }, "text" : { "description" : "Monthly partitioned checks of text values in the column", - "$ref" : "#/components/schemas/ColumnTextMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnTextMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnTextMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnTextMonthlyPartitionedChecksSpec" }, "whitespace" : { "description" : "Configuration of column level checks that detect blank and whitespace values", - "$ref" : "#/components/schemas/ColumnWhitespaceMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceMonthlyPartitionedChecksSpec" }, "conversions" : { "description" : "Configuration of conversion testing checks on a column level.", - "$ref" : "#/components/schemas/ColumnConversionsMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnConversionsMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnConversionsMonthlyPartitionedChecksSpec", + "$ref" : 
"#/components/schemas/ColumnConversionsMonthlyPartitionedChecksSpec" }, "patterns" : { "description" : "Monthly partitioned pattern match checks on a column level", - "$ref" : "#/components/schemas/ColumnPatternsMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnPatternsMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnPatternsMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnPatternsMonthlyPartitionedChecksSpec" }, "pii" : { "description" : "Monthly partitioned checks of Personal Identifiable Information (PII) in the column", - "$ref" : "#/components/schemas/ColumnPiiMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnPiiMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnPiiMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnPiiMonthlyPartitionedChecksSpec" }, "numeric" : { "description" : "Monthly partitioned checks of numeric values in the column", - "$ref" : "#/components/schemas/ColumnNumericMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnNumericMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnNumericMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnNumericMonthlyPartitionedChecksSpec" }, "datetime" : { "description" : "Monthly partitioned checks of datetime in the column", - "$ref" : "#/components/schemas/ColumnDatetimeMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnDatetimeMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnDatetimeMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnDatetimeMonthlyPartitionedChecksSpec" }, "bool" : { "description" : "Monthly partitioned checks for booleans in the column", - "$ref" : "#/components/schemas/ColumnBoolMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnBoolMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnBoolMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnBoolMonthlyPartitionedChecksSpec" }, "integrity" : { "description" : "Monthly partitioned checks for integrity in the column", - "$ref" : "#/components/schemas/ColumnIntegrityMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnIntegrityMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnIntegrityMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnIntegrityMonthlyPartitionedChecksSpec" }, "custom_sql" : { "description" : "Monthly partitioned checks using custom SQL expressions evaluated on the column", - "$ref" : "#/components/schemas/ColumnCustomSqlMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnCustomSqlMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnCustomSqlMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnCustomSqlMonthlyPartitionedChecksSpec" }, "datatype" : { "description" : "Monthly partitioned checks for datatype in the column", - "$ref" : "#/components/schemas/ColumnDatatypeMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/ColumnDatatypeMonthlyPartitionedChecksSpec" }, "comparisons" : { "type" : "object", "description" : "Dictionary of configuration of checks for table comparisons at a column level. 
The key that identifies each comparison must match the name of a data comparison that is configured on the parent table.", "additionalProperties" : { - "$ref" : "#/definitions/ColumnComparisonMonthlyPartitionedChecksSpec", - "originalRef" : "#/definitions/ColumnComparisonMonthlyPartitionedChecksSpec" + "originalRef" : "#/definitions/ColumnComparisonMonthlyPartitionedChecksSpec", + "$ref" : "#/definitions/ColumnComparisonMonthlyPartitionedChecksSpec" } } } @@ -48424,15 +48424,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -48469,23 +48469,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericNegativeCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericNegativeCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericNegativeCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericNegativeCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -48594,15 +48594,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. 
Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -48639,23 +48639,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericNegativePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericNegativePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericNegativePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericNegativePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -48764,15 +48764,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -48809,23 +48809,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericNonNegativeCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericNonNegativeCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericNonNegativeCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericNonNegativeCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with non-negative values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -48934,15 +48934,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -48979,23 +48979,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericNonNegativePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericNonNegativePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericNonNegativePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericNonNegativePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of rows with non-negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -49104,15 +49104,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -49149,23 +49149,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinCountRule1ParametersSpec", - "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec" + "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec", + "$ref" : "#/components/schemas/MinCountRule1ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with not null values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinCountRule1ParametersSpec", - "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec" + "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec", + "$ref" : "#/components/schemas/MinCountRule1ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinCountRule1ParametersSpec", - "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec" + "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec", + "$ref" : "#/components/schemas/MinCountRule1ParametersSpec" } } } @@ -49274,15 +49274,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -49319,23 +49319,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNullsNotNullsPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNotNullsPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNotNullsPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNotNullsPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of rows with not null values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -49444,15 +49444,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -49489,23 +49489,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for an anomaly in the percentage of null values in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentValuesRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentValuesRuleFatal01PctParametersSpec" } } } @@ -49614,15 +49614,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -49659,23 +49659,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a change in the percentage of null values in a column since the last readout from yesterday that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" } } } @@ -49784,15 +49784,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -49829,23 +49829,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a change in the percentage of null values in a column since the last readout from the last month that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" } } } @@ -49954,15 +49954,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -49999,23 +49999,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a change in the percentage of null values in a column since the last readout from the last week that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" } } } @@ -50124,15 +50124,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -50169,23 +50169,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a change in the percentage of null values in a column since the last readout that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec" } } } @@ -50294,15 +50294,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -50339,23 +50339,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with null values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -50439,59 +50439,59 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_nulls_count" : { "description" : "Detects incomplete columns that contain any null values. Counts the number of rows having a null value. Raises a data quality issue when the count of null values is above a max_count threshold. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNullsCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullsCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullsCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNullsCountCheckSpec" }, "daily_nulls_percent" : { "description" : "Detects incomplete columns that contain any null values. Measures the percentage of rows having a null value. Raises a data quality issue when the percentage of null values is above a max_percent threshold. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNullsPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullsPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullsPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNullsPercentCheckSpec" }, "daily_nulls_percent_anomaly" : { "description" : "Detects day-to-day anomalies in the percentage of null values. Raises a data quality issue when the rate of null values increases or decreases too much during the last 90 days.", - "$ref" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec" }, "daily_not_nulls_count" : { "description" : "Verifies that a column contains a minimum number of non-null values. The default value of the *min_count* parameter is 1 to detect at least one value in a monitored column. Raises a data quality issue when the count of non-null values is below min_count. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNotNullsCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNotNullsCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNotNullsCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNotNullsCountCheckSpec" }, "daily_not_nulls_percent" : { "description" : "Detects columns that contain too many non-null values. Measures the percentage of rows that have non-null values. Raises a data quality issue when the percentage of non-null values is above max_percentage.", - "$ref" : "#/components/schemas/ColumnNotNullsPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNotNullsPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNotNullsPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNotNullsPercentCheckSpec" }, "daily_empty_column_found" : { "description" : "Detects empty columns that contain only null values. Counts the number of rows that have non-null values. Raises a data quality issue when the column is empty. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec" }, "daily_nulls_percent_change" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnNullPercentChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChangeCheckSpec" }, "daily_nulls_percent_change_1_day" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec" }, "daily_nulls_percent_change_7_days" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since the last readout from the last week.", - "$ref" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec" }, "daily_nulls_percent_change_30_days" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec" } } } @@ -50575,59 +50575,59 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_nulls_count" : { "description" : "Detects incomplete columns that contain any null values. Counts the number of rows having a null value. Raises a data quality issue when the count of null values is above a max_count threshold. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNullsCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullsCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullsCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNullsCountCheckSpec" }, "daily_partition_nulls_percent" : { "description" : "Detects incomplete columns that contain any null values. Measures the percentage of rows having a null value. 
Raises a data quality issue when the percentage of null values is above a max_percent threshold. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNullsPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullsPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullsPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNullsPercentCheckSpec" }, "daily_partition_nulls_percent_anomaly" : { "description" : "Detects day-to-day anomalies in the percentage of null values. Raises a data quality issue when the rate of null values increases or decreases too much during the last 90 days.", - "$ref" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec" }, "daily_partition_not_nulls_count" : { "description" : "Verifies that a column contains a minimum number of non-null values. The default value of the *min_count* parameter is 1 to detect at least one value in a monitored column. Raises a data quality issue when the count of non-null values is below min_count. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNotNullsCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNotNullsCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNotNullsCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNotNullsCountCheckSpec" }, "daily_partition_not_nulls_percent" : { "description" : "Detects columns that contain too many non-null values. Measures the percentage of rows that have non-null values. Raises a data quality issue when the percentage of non-null values is above max_percentage. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNotNullsPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNotNullsPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNotNullsPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNotNullsPercentCheckSpec" }, "daily_partition_empty_column_found" : { "description" : "Detects empty columns that contain only null values. Counts the number of rows that have non-null values. Raises a data quality issue when the column is empty. 
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec" }, "daily_partition_nulls_percent_change" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnNullPercentChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChangeCheckSpec" }, "daily_partition_nulls_percent_change_1_day" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec" }, "daily_partition_nulls_percent_change_7_days" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since the last readout from the last week.", - "$ref" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec" }, "daily_partition_nulls_percent_change_30_days" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec" } } } @@ -50686,34 +50686,34 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_nulls_count" : { "description" : "Detects incomplete columns that contain any null values. Counts the number of rows having a null value. Raises a data quality issue when the count of null values is above a max_count threshold. Stores the most recent count check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNullsCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullsCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullsCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNullsCountCheckSpec" }, "monthly_nulls_percent" : { "description" : "Detects incomplete columns that contain any null values. Measures the percentage of rows having a null value. 
Raises a data quality issue when the percentage of null values is above a max_percent threshold. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNullsPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullsPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullsPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNullsPercentCheckSpec" }, "monthly_not_nulls_count" : { "description" : "Verifies that a column contains a minimum number of non-null values. The default value of the *min_count* parameter is 1 to detect at least one value in a monitored column. Raises a data quality issue when the count of non-null values is below min_count. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNotNullsCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNotNullsCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNotNullsCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNotNullsCountCheckSpec" }, "monthly_not_nulls_percent" : { "description" : "Detects columns that contain too many non-null values. Measures the percentage of rows that have non-null values. Raises a data quality issue when the percentage of non-null values is above max_percentage. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNotNullsPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNotNullsPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNotNullsPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNotNullsPercentCheckSpec" }, "monthly_empty_column_found" : { "description" : "Detects empty columns that contain only null values. Counts the number of rows that have non-null values. Raises a data quality issue when the column is empty. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec" } } } @@ -50772,34 +50772,34 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_nulls_count" : { "description" : "Detects incomplete columns that contain any null values. Counts the number of rows having a null value. Raises a data quality issue when the count of null values is above a max_count threshold. 
Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnNullsCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullsCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullsCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNullsCountCheckSpec" }, "monthly_partition_nulls_percent" : { "description" : "Detects incomplete columns that contain any null values. Measures the percentage of rows having a null value. Raises a data quality issue when the percentage of null values is above a max_percent threshold. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnNullsPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullsPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullsPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNullsPercentCheckSpec" }, "monthly_partition_not_nulls_count" : { "description" : "Verifies that a column contains a minimum number of non-null values. The default value of the *min_count* parameter is 1 to detect at least one value in a monitored column. Raises a data quality issue when the count of non-null values is below min_count. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnNotNullsCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNotNullsCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNotNullsCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNotNullsCountCheckSpec" }, "monthly_partition_not_nulls_percent" : { "description" : "Detects columns that contain too many non-null values. Measures the percentage of rows that have non-null values. Raises a data quality issue when the percentage of non-null values is above max_percentage. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnNotNullsPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNotNullsPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNotNullsPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNotNullsPercentCheckSpec" }, "monthly_partition_empty_column_found" : { "description" : "Detects empty columns that contain only null values. Counts the number of rows that have non-null values. Raises a data quality issue when the column is empty. 
Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec" } } } @@ -50854,8 +50854,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNotNullsCountSensorParametersSpec" } } } @@ -50910,8 +50910,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnNullsNotNullsPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNotNullsPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNotNullsPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNotNullsPercentSensorParametersSpec" } } } @@ -50966,8 +50966,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsCountSensorParametersSpec" } } } @@ -51022,8 +51022,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" } } } @@ -51132,15 +51132,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -51177,23 +51177,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of rows with null values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -51277,59 +51277,59 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_nulls_count" : { "description" : "Detects incomplete columns that contain any null values. Counts the number of rows having a null value. Raises a data quality issue when the count of null values is above a max_count threshold.", - "$ref" : "#/components/schemas/ColumnNullsCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullsCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullsCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNullsCountCheckSpec" }, "profile_nulls_percent" : { "description" : "Detects incomplete columns that contain any null values. Measures the percentage of rows having a null value.
Raises a data quality issue when the percentage of null values is above a max_percent threshold.", - "$ref" : "#/components/schemas/ColumnNullsPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullsPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullsPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNullsPercentCheckSpec" }, "profile_nulls_percent_anomaly" : { "description" : "Detects day-to-day anomalies in the percentage of null values. Raises a data quality issue when the rate of null values increases or decreases too much during the last 90 days.", - "$ref" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentAnomalyStationaryCheckSpec" }, "profile_not_nulls_count" : { "description" : "Verifies that a column contains a minimum number of non-null values. The default value of the *min_count* parameter is 1 to detect at least one value in a monitored column. Raises a data quality issue when the count of non-null values is below min_count.", - "$ref" : "#/components/schemas/ColumnNotNullsCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNotNullsCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNotNullsCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNotNullsCountCheckSpec" }, "profile_not_nulls_percent" : { "description" : "Detects columns that contain too many non-null values. Measures the percentage of rows that have non-null values. Raises a data quality issue when the percentage of non-null values is above max_percentage.", - "$ref" : "#/components/schemas/ColumnNotNullsPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNotNullsPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNotNullsPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNotNullsPercentCheckSpec" }, "profile_empty_column_found" : { "description" : "Detects empty columns that contain only null values. Counts the number of rows that have non-null values. 
Raises a data quality issue when the column is empty.", - "$ref" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnEmptyColumnFoundCheckSpec" }, "profile_nulls_percent_change" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since last readout.", - "$ref" : "#/components/schemas/ColumnNullPercentChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChangeCheckSpec" }, "profile_nulls_percent_change_1_day" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChange1DayCheckSpec" }, "profile_nulls_percent_change_7_days" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since last readout from last week.", - "$ref" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChange7DaysCheckSpec" }, "profile_nulls_percent_change_30_days" : { "description" : "Verifies that the null percent value in a column changed in a fixed rate since last readout from last month.", - "$ref" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnNullPercentChange30DaysCheckSpec" } } } @@ -51365,23 +51365,23 @@ "properties" : { "nulls_count" : { "description" : "Configuration of the profiler that counts null column values.", - "$ref" : "#/components/schemas/ColumnNullsNullsCountStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsCountStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsCountStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsCountStatisticsCollectorSpec" }, "nulls_percent" : { "description" : "Configuration of the profiler that measures the percentage of null values.", - "$ref" : "#/components/schemas/ColumnNullsNullsPercentStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnNullsNullsPercentStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnNullsNullsPercentStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnNullsNullsPercentStatisticsCollectorSpec" }, "not_nulls_count" : { "description" : "Configuration of the profiler that counts not null column values.", - "$ref" : "#/components/schemas/ColumnNullsNotNullsCountStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnNullsNotNullsCountStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnNullsNotNullsCountStatisticsCollectorSpec", + "$ref" : 
"#/components/schemas/ColumnNullsNotNullsCountStatisticsCollectorSpec" }, "not_nulls_percent" : { "description" : "Configuration of the profiler that measures the percentage of not null values.", - "$ref" : "#/components/schemas/ColumnNullsNotNullsPercentStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnNullsNotNullsPercentStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnNullsNotNullsPercentStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnNullsNotNullsPercentStatisticsCollectorSpec" } } } @@ -51490,15 +51490,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -51535,23 +51535,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericNumberAboveMaxValueCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericNumberAboveMaxValueCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericNumberAboveMaxValueCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericNumberAboveMaxValueCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with values with a value above the indicated by the user value in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -51660,15 +51660,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. 
Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -51705,23 +51705,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericNumberAboveMaxValuePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericNumberAboveMaxValuePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericNumberAboveMaxValuePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericNumberAboveMaxValuePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with values above the value indicated by the user in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -51830,15 +51830,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -51875,23 +51875,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericNumberBelowMinValueCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericNumberBelowMinValueCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericNumberBelowMinValueCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericNumberBelowMinValueCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with values below the value indicated by the user in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -52000,15 +52000,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -52045,23 +52045,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericNumberBelowMinValuePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericNumberBelowMinValuePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericNumberBelowMinValuePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericNumberBelowMinValuePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with values below the value indicated by the user in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -52170,15 +52170,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -52215,23 +52215,23 @@ }, "parameters" : { "description" : "Data quality check parameters that specify a list of expected values that are compared to the values in the tested numeric column.", - "$ref" : "#/components/schemas/ColumnNumericNumberFoundInSetPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericNumberFoundInSetPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericNumberFoundInSetPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericNumberFoundInSetPercentSensorParametersSpec" }, "warning" : { "description" : "Default alerting threshold for a percentage of rows with valid values in a column (from a set of expected values). Raises a data quality issue at a warning severity level when the percentage of valid rows is below the minimum percentage threshold.", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a percentage of rows with valid values in a column (from a set of expected values). Raises a data quality issue at an error severity level when the percentage of valid rows is below the minimum percentage threshold.", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Default alerting threshold for a percentage of rows with valid values in a column (from a set of expected values). Raises a data quality issue at a fatal severity level when the percentage of valid rows is below the minimum percentage threshold.", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -52340,15 +52340,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non-tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -52385,23 +52385,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericNumberInRangePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericNumberInRangePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericNumberInRangePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericNumberInRangePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of values from the range in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -52575,149 +52575,149 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_number_below_min_value" : { "description" : "The check counts the number of values in the column that are below the value defined by the user as a parameter. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec" }, "daily_number_above_max_value" : { "description" : "The check counts the number of values in the column that are above the value defined by the user as a parameter.
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec" }, "daily_negative_values" : { "description" : "Verifies that the number of negative values in a column does not exceed the maximum accepted count. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNegativeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNegativeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNegativeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNegativeCountCheckSpec" }, "daily_negative_values_percent" : { "description" : "Verifies that the percentage of negative values in a column does not exceed the maximum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNegativePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNegativePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNegativePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNegativePercentCheckSpec" }, "daily_number_below_min_value_percent" : { "description" : "The check counts the percentage of values in the column that are below the value defined by the user as a parameter. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec" }, "daily_number_above_max_value_percent" : { "description" : "The check counts the percentage of values in the column that are above the value defined by the user as a parameter. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec" }, "daily_number_in_range_percent" : { "description" : "Verifies that the percentage of values from the range in a column does not fall below the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec" }, "daily_integer_in_range_percent" : { "description" : "Verifies that the percentage of values from the range in a column does not fall below the minimum accepted percentage.
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec" }, "daily_min_in_range" : { "description" : "Verifies that the minimum value in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnMinInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMinInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMinInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMinInRangeCheckSpec" }, "daily_max_in_range" : { "description" : "Verifies that the maximum value in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnMaxInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMaxInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMaxInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMaxInRangeCheckSpec" }, "daily_sum_in_range" : { "description" : "Verifies that the sum of all values in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSumInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSumInRangeCheckSpec" }, "daily_mean_in_range" : { "description" : "Verifies that the average (mean) of all values in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnMeanInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanInRangeCheckSpec" }, "daily_median_in_range" : { "description" : "Verifies that the median of all values in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnMedianInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianInRangeCheckSpec" }, "daily_percentile_in_range" : { "description" : "Verifies that the percentile of all values in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPercentileInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentileInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentileInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentileInRangeCheckSpec" }, "daily_percentile_10_in_range" : { "description" : "Verifies that the percentile 10 of all values in a column is not outside the expected range.
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec" }, "daily_percentile_25_in_range" : { "description" : "Verifies that the percentile 25 of all values in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec" }, "daily_percentile_75_in_range" : { "description" : "Verifies that the percentile 75 of all values in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec" }, "daily_percentile_90_in_range" : { "description" : "Verifies that the percentile 90 of all values in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec" }, "daily_sample_stddev_in_range" : { "description" : "Verifies that the sample standard deviation of all values in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec" }, "daily_population_stddev_in_range" : { "description" : "Verifies that the population standard deviation of all values in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec" }, "daily_sample_variance_in_range" : { "description" : "Verifies that the sample variance of all values in a column is not outside the expected range. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec" }, "daily_population_variance_in_range" : { "description" : "Verifies that the population variance of all values in a column is not outside the expected range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec" }, "daily_invalid_latitude" : { "description" : "Verifies that the number of invalid latitude values in a column does not exceed the maximum accepted count. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec" }, "daily_valid_latitude_percent" : { "description" : "Verifies that the percentage of valid latitude values in a column does not fall below the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec" }, "daily_invalid_longitude" : { "description" : "Verifies that the number of invalid longitude values in a column does not exceed the maximum accepted count. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec" }, "daily_valid_longitude_percent" : { "description" : "Verifies that the percentage of valid longitude values in a column does not fall below the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec" }, "daily_non_negative_values" : { "description" : "Verifies that the number of non-negative values in a column does not exceed the maximum accepted count. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNonNegativeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNonNegativeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNonNegativeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNonNegativeCountCheckSpec" }, "daily_non_negative_values_percent" : { "description" : "Verifies that the percentage of non-negative values in a column does not exceed the maximum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNonNegativePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNonNegativePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNonNegativePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNonNegativePercentCheckSpec" } } } @@ -52891,149 +52891,149 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_number_below_min_value" : { "description" : "The check counts the number of values in the column that are below the value defined by the user as a parameter. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec" }, "daily_partition_number_above_max_value" : { "description" : "The check counts the number of values in the column that are above the value defined by the user as a parameter. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec" }, "daily_partition_negative_values" : { "description" : "Verifies that the number of negative values in a column does not exceed the maximum accepted count. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNegativeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNegativeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNegativeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNegativeCountCheckSpec" }, "daily_partition_negative_values_percent" : { "description" : "Verifies that the percentage of negative values in a column does not exceed the maximum accepted percentage. 
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNegativePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNegativePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNegativePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNegativePercentCheckSpec" }, "daily_partition_number_below_min_value_percent" : { "description" : "The check counts the percentage of values in the column that are below the value defined by the user as a parameter. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec" }, "daily_partition_number_above_max_value_percent" : { "description" : "The check counts the percentage of values in the column that are above the value defined by the user as a parameter. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec" }, "daily_partition_number_in_range_percent" : { "description" : "Verifies that the percentage of values from range in a column does not exceed the minimum accepted percentage. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec" }, "daily_partition_integer_in_range_percent" : { "description" : "Verifies that the percentage of values from range in a column does not exceed the minimum accepted percentage. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec" }, "daily_partition_min_in_range" : { "description" : "Verifies that the minimum value in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnMinInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMinInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMinInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMinInRangeCheckSpec" }, "daily_partition_max_in_range" : { "description" : "Verifies that the maximum value in a column is not outside the expected range. 
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnMaxInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMaxInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMaxInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMaxInRangeCheckSpec" }, "daily_partition_sum_in_range" : { "description" : "Verifies that the sum of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnSumInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSumInRangeCheckSpec" }, "daily_partition_mean_in_range" : { "description" : "Verifies that the average (mean) of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnMeanInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanInRangeCheckSpec" }, "daily_partition_median_in_range" : { "description" : "Verifies that the median of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnMedianInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianInRangeCheckSpec" }, "daily_partition_percentile_in_range" : { "description" : "Verifies that the percentile of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnPercentileInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentileInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentileInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentileInRangeCheckSpec" }, "daily_partition_percentile_10_in_range" : { "description" : "Verifies that the percentile 10 of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec" }, "daily_partition_percentile_25_in_range" : { "description" : "Verifies that the percentile 25 of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec" }, "daily_partition_percentile_75_in_range" : { "description" : "Verifies that the percentile 75 of all values in a column is not outside the expected range. 
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec" }, "daily_partition_percentile_90_in_range" : { "description" : "Verifies that the percentile 90 of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec" }, "daily_partition_sample_stddev_in_range" : { "description" : "Verifies that the sample standard deviation of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec" }, "daily_partition_population_stddev_in_range" : { "description" : "Verifies that the population standard deviation of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec" }, "daily_partition_sample_variance_in_range" : { "description" : "Verifies that the sample variance of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec" }, "daily_partition_population_variance_in_range" : { "description" : "Verifies that the population variance of all values in a column is not outside the expected range. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec" }, "daily_partition_invalid_latitude" : { "description" : "Verifies that the number of invalid latitude values in a column does not exceed the maximum accepted count. 
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec" }, "daily_partition_valid_latitude_percent" : { "description" : "Verifies that the percentage of valid latitude values in a column does not fall below the minimum accepted percentage. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec" }, "daily_partition_invalid_longitude" : { "description" : "Verifies that the number of invalid longitude values in a column does not exceed the maximum accepted count. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec" }, "daily_partition_valid_longitude_percent" : { "description" : "Verifies that the percentage of valid longitude values in a column does not fall below the minimum accepted percentage. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec" }, "daily_partition_non_negative_values" : { "description" : "Verifies that the number of non-negative values in a column does not exceed the maximum accepted count. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNonNegativeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNonNegativeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNonNegativeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNonNegativeCountCheckSpec" }, "daily_partition_non_negative_values_percent" : { "description" : "Verifies that the percentage of non-negative values in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnNonNegativePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNonNegativePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNonNegativePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNonNegativePercentCheckSpec" } } } @@ -53452,149 +53452,149 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
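Every hunk in this part of the patch makes the same mechanical change: the generator now emits `originalRef` before `$ref`, with identical values. Since JSON object member order carries no meaning, the reordering is purely cosmetic, as a minimal stdlib sketch can confirm (the two fragments below are hypothetical stand-ins for one `-`/`+` pair above):

```python
import json

# Hypothetical stand-ins for the "-" and "+" sides of one hunk above:
# the same two members, emitted in the opposite order.
before = ('{"$ref": "#/components/schemas/ColumnSumInRangeCheckSpec",'
          ' "originalRef": "#/components/schemas/ColumnSumInRangeCheckSpec"}')
after = ('{"originalRef": "#/components/schemas/ColumnSumInRangeCheckSpec",'
         ' "$ref": "#/components/schemas/ColumnSumInRangeCheckSpec"}')

# JSON object member order is not significant, so both fragments parse
# to equal dictionaries; clients see no behavioral difference.
assert json.loads(before) == json.loads(after)
```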
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_number_below_min_value" : { "description" : "The check counts the number of values in the column that are below the value defined by the user as a parameter. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec" }, "monthly_number_above_max_value" : { "description" : "The check counts the number of values in the column that are above the value defined by the user as a parameter. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec" }, "monthly_negative_values" : { "description" : "Verifies that the number of negative values in a column does not exceed the maximum accepted count. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNegativeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNegativeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNegativeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNegativeCountCheckSpec" }, "monthly_negative_values_percent" : { "description" : "Verifies that the percentage of negative values in a column does not exceed the maximum accepted percentage. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNegativePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNegativePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNegativePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNegativePercentCheckSpec" }, "monthly_number_below_min_value_percent" : { "description" : "The check counts the percentage of values in the column that are below the value defined by the user as a parameter. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec" }, "monthly_number_above_max_value_percent" : { "description" : "The check counts the percentage of values in the column that are above the value defined by the user as a parameter. 
Stores the most recent value for each month when the data quality check was evaluated.",
-         "$ref" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec"
        },
        "monthly_number_in_range_percent" : {
          "description" : "Verifies that the percentage of values within the expected range in a column does not fall below the minimum accepted percentage. Stores the most recent value for each month when the data quality check was evaluated.",
-         "$ref" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec"
        },
        "monthly_integer_in_range_percent" : {
          "description" : "Verifies that the percentage of integer values within the expected range in a column does not fall below the minimum accepted percentage. Stores the most recent value for each month when the data quality check was evaluated.",
-         "$ref" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec"
        },
        "monthly_min_in_range" : {
          "description" : "Verifies that the minimum value in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.",
-         "$ref" : "#/components/schemas/ColumnMinInRangeCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnMinInRangeCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnMinInRangeCheckSpec",
+         "$ref" : "#/components/schemas/ColumnMinInRangeCheckSpec"
        },
        "monthly_max_in_range" : {
          "description" : "Verifies that the maximum value in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.",
-         "$ref" : "#/components/schemas/ColumnMaxInRangeCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnMaxInRangeCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnMaxInRangeCheckSpec",
+         "$ref" : "#/components/schemas/ColumnMaxInRangeCheckSpec"
        },
        "monthly_sum_in_range" : {
          "description" : "Verifies that the sum of all values in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.",
-         "$ref" : "#/components/schemas/ColumnSumInRangeCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnSumInRangeCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnSumInRangeCheckSpec",
+         "$ref" : "#/components/schemas/ColumnSumInRangeCheckSpec"
        },
        "monthly_mean_in_range" : {
          "description" : "Verifies that the average (mean) of all values in a column is not outside the expected range.
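The `*_number_in_range_percent` checks above measure the share of values that fall inside the configured range and fail when that share drops below the minimum accepted percentage. A minimal sketch of that measure, with an arbitrary example range and threshold (not DQOps code, which computes this as a SQL sensor):

```python
def number_in_range_percent(values: list[float], low: float, high: float) -> float:
    """Share of values inside [low, high]; DQOps computes the same
    measure as a SQL sensor on the monitored table."""
    if not values:
        return 100.0  # an empty column has no out-of-range values
    inside = sum(1 for v in values if low <= v <= high)
    return 100.0 * inside / len(values)

# 3 of 4 values fall inside 0..50, so the measure is 75.0; a rule with
# a minimum accepted percentage of 90 would flag this column.
print(number_in_range_percent([5.0, 7.0, 12.0, 99.0], 0.0, 50.0))
```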
Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnMeanInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMeanInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanInRangeCheckSpec" }, "monthly_median_in_range" : { "description" : "Verifies that the median of all values in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnMedianInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianInRangeCheckSpec" }, "monthly_percentile_in_range" : { "description" : "Verifies that the percentile of all values in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPercentileInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentileInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentileInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentileInRangeCheckSpec" }, "monthly_percentile_10_in_range" : { "description" : "Verifies that the percentile 10 of all values in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec" }, "monthly_percentile_25_in_range" : { "description" : "Verifies that the percentile 25 of all values in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec" }, "monthly_percentile_75_in_range" : { "description" : "Verifies that the percentile 75 of all values in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec" }, "monthly_percentile_90_in_range" : { "description" : "Verifies that the percentile 90 of all values in a column is not outside the expected range. 
Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec" }, "monthly_sample_stddev_in_range" : { "description" : "Verifies that the sample standard deviation of all values in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec" }, "monthly_population_stddev_in_range" : { "description" : "Verifies that the population standard deviation of all values in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec" }, "monthly_sample_variance_in_range" : { "description" : "Verifies that the sample variance of all values in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec" }, "monthly_population_variance_in_range" : { "description" : "Verifies that the population variance of all values in a column is not outside the expected range. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec" }, "monthly_invalid_latitude" : { "description" : "Verifies that the number of invalid latitude values in a column does not exceed the maximum accepted count. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec" }, "monthly_valid_latitude_percent" : { "description" : "Verifies that the percentage of valid latitude values in a column does not fall below the minimum accepted percentage. 
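The range checks above each compare a single aggregate against configured bounds. The aggregates themselves are standard statistics; a local illustration with Python's `statistics` module (DQOps evaluates them as SQL sensors on the monitored database, and the sample values here are made up):

```python
import statistics

values = [4.0, 8.0, 15.0, 16.0, 23.0, 42.0]  # made-up sample data

# The aggregates behind several *_in_range checks in this schema.
aggregates = {
    "mean_in_range": statistics.mean(values),
    "median_in_range": statistics.median(values),
    "sample_stddev_in_range": statistics.stdev(values),
    "population_stddev_in_range": statistics.pstdev(values),
    "sample_variance_in_range": statistics.variance(values),
    "population_variance_in_range": statistics.pvariance(values),
}

# Each check passes when its aggregate lies inside the configured
# bounds; 10..20 here is an arbitrary example range.
low, high = 10.0, 20.0
print({name: low <= value <= high for name, value in aggregates.items()})
```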
Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec" }, "monthly_invalid_longitude" : { "description" : "Verifies that the number of invalid longitude values in a column does not exceed the maximum accepted count. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec" }, "monthly_valid_longitude_percent" : { "description" : "Verifies that the percentage of valid longitude values in a column does not fall below the minimum accepted percentage. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec" }, "monthly_non_negative_values" : { "description" : "Verifies that the number of non-negative values in a column does not exceed the maximum accepted count. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNonNegativeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNonNegativeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNonNegativeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNonNegativeCountCheckSpec" }, "monthly_non_negative_values_percent" : { "description" : "Verifies that the percentage of non-negative values in a column does not exceed the maximum accepted percentage. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnNonNegativePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNonNegativePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNonNegativePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNonNegativePercentCheckSpec" } } } @@ -53768,149 +53768,149 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_number_below_min_value" : { "description" : "The check counts the number of values in the column that are below the value defined by the user as a parameter. 
Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec",
+         "$ref" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec"
        },
        "monthly_partition_number_above_max_value" : {
          "description" : "The check counts the number of values in the column that are above the value defined by the user as a parameter. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec",
+         "$ref" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec"
        },
        "monthly_partition_negative_values" : {
          "description" : "Verifies that the number of negative values in a column does not exceed the maximum accepted count. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnNegativeCountCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnNegativeCountCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnNegativeCountCheckSpec",
+         "$ref" : "#/components/schemas/ColumnNegativeCountCheckSpec"
        },
        "monthly_partition_negative_values_percent" : {
          "description" : "Verifies that the percentage of negative values in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnNegativePercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnNegativePercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnNegativePercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnNegativePercentCheckSpec"
        },
        "monthly_partition_number_below_min_value_percent" : {
          "description" : "The check counts the percentage of values in the column that are below the value defined by the user as a parameter. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec"
        },
        "monthly_partition_number_above_max_value_percent" : {
          "description" : "The check counts the percentage of values in the column that are above the value defined by the user as a parameter. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec"
        },
        "monthly_partition_number_in_range_percent" : {
          "description" : "Verifies that the percentage of values within the expected range in a column does not fall below the minimum accepted percentage.
Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec"
        },
        "monthly_partition_integer_in_range_percent" : {
          "description" : "Verifies that the percentage of integer values within the expected range in a column does not fall below the minimum accepted percentage. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec"
        },
        "monthly_partition_min_in_range" : {
          "description" : "Verifies that the minimum value in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnMinInRangeCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnMinInRangeCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnMinInRangeCheckSpec",
+         "$ref" : "#/components/schemas/ColumnMinInRangeCheckSpec"
        },
        "monthly_partition_max_in_range" : {
          "description" : "Verifies that the maximum value in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnMaxInRangeCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnMaxInRangeCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnMaxInRangeCheckSpec",
+         "$ref" : "#/components/schemas/ColumnMaxInRangeCheckSpec"
        },
        "monthly_partition_sum_in_range" : {
          "description" : "Verifies that the sum of all values in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnSumInRangeCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnSumInRangeCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnSumInRangeCheckSpec",
+         "$ref" : "#/components/schemas/ColumnSumInRangeCheckSpec"
        },
        "monthly_partition_mean_in_range" : {
          "description" : "Verifies that the average (mean) of all values in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnMeanInRangeCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnMeanInRangeCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnMeanInRangeCheckSpec",
+         "$ref" : "#/components/schemas/ColumnMeanInRangeCheckSpec"
        },
        "monthly_partition_median_in_range" : {
          "description" : "Verifies that the median of all values in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.",
-         "$ref" : "#/components/schemas/ColumnMedianInRangeCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnMedianInRangeCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnMedianInRangeCheckSpec",
+         "$ref" : "#/components/schemas/ColumnMedianInRangeCheckSpec"
        },
        "monthly_partition_percentile_in_range" : {
          "description" : "Verifies that the percentile of all values in a column is not outside the expected range.
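Each check property above points at its check-spec schema through a local `$ref` such as `#/components/schemas/ColumnPercentileInRangeCheckSpec`. Resolving such a pointer is a simple walk over the loaded document; a sketch with a hypothetical helper name (real tooling uses a full JSON Pointer implementation with `~0`/`~1` unescaping):

```python
def resolve_local_ref(document: dict, ref: str) -> dict:
    """Follow a local reference like
    '#/components/schemas/ColumnPercentileInRangeCheckSpec' by walking
    the loaded OpenAPI document one path segment at a time."""
    if not ref.startswith("#/"):
        raise ValueError(f"only local references are supported: {ref}")
    node = document
    for segment in ref[2:].split("/"):
        node = node[segment]  # a KeyError here means a dangling $ref
    return node

# Usage against a minimal stand-in for the specification shown here:
spec = {"components": {"schemas": {"ColumnPercentileInRangeCheckSpec": {"type": "object"}}}}
assert resolve_local_ref(spec, "#/components/schemas/ColumnPercentileInRangeCheckSpec") == {"type": "object"}
```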
Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnPercentileInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentileInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentileInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentileInRangeCheckSpec" }, "monthly_partition_percentile_10_in_range" : { "description" : "Verifies that the percentile 10 of all values in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec" }, "monthly_partition_percentile_25_in_range" : { "description" : "Verifies that the percentile 25 of all values in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec" }, "monthly_partition_percentile_75_in_range" : { "description" : "Verifies that the percentile 75 of all values in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec" }, "monthly_partition_percentile_90_in_range" : { "description" : "Verifies that the percentile 90 of all values in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec" }, "monthly_partition_sample_stddev_in_range" : { "description" : "Verifies that the sample standard deviation of all values in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec" }, "monthly_partition_population_stddev_in_range" : { "description" : "Verifies that the population standard deviation of all values in a column is not outside the expected range. 
Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec" }, "monthly_partition_sample_variance_in_range" : { "description" : "Verifies that the sample variance of all values in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec" }, "monthly_partition_population_variance_in_range" : { "description" : "Verifies that the population variance of all values in a column is not outside the expected range. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec" }, "monthly_partition_invalid_latitude" : { "description" : "Verifies that the number of invalid latitude values in a column does not exceed the maximum accepted count. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec" }, "monthly_partition_valid_latitude_percent" : { "description" : "Verifies that the percentage of valid latitude values in a column does not fall below the minimum accepted percentage. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec" }, "monthly_partition_invalid_longitude" : { "description" : "Verifies that the number of invalid longitude values in a column does not exceed the maximum accepted count. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec" }, "monthly_partition_valid_longitude_percent" : { "description" : "Verifies that the percentage of valid longitude values in a column does not fall below the minimum accepted percentage. 
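The `monthly_partition_*` checks above store one result per monthly partition rather than one result per table. The grouping behind that phrasing can be sketched with the standard library alone (the rows, the 0..50 bounds, and the rule are all invented for illustration; DQOps does this partitioning in SQL using the configured date partitioning column):

```python
from collections import defaultdict
from datetime import date

# Invented rows: (value of the date partitioning column, measured value).
rows = [
    (date(2024, 9, 3), 10.0),
    (date(2024, 9, 21), 12.5),
    (date(2024, 10, 7), 55.0),
]

# Group values by calendar month, then evaluate the rule once per
# partition, mirroring "stores a separate data quality check result
# for each monthly partition". The 0..50 bounds are an example rule.
partitions = defaultdict(list)
for day, value in rows:
    partitions[day.replace(day=1)].append(value)

results = {month.isoformat(): all(0.0 <= v <= 50.0 for v in values)
           for month, values in sorted(partitions.items())}
print(results)  # {'2024-09-01': True, '2024-10-01': False}
```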
Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec" }, "monthly_partition_non_negative_values" : { "description" : "Verifies that the number of non-negative values in a column does not exceed the maximum accepted count. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnNonNegativeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNonNegativeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNonNegativeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNonNegativeCountCheckSpec" }, "monthly_partition_non_negative_values_percent" : { "description" : "Verifies that the percentage of non-negative values in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnNonNegativePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNonNegativePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNonNegativePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNonNegativePercentCheckSpec" } } } @@ -54640,149 +54640,149 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_number_below_min_value" : { "description" : "The check counts the number of values in the column that are below the value defined by the user as a parameter.", - "$ref" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberBelowMinValueCheckSpec" }, "profile_number_above_max_value" : { "description" : "The check counts the number of values in the column that are above the value defined by the user as a parameter.", - "$ref" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberAboveMaxValueCheckSpec" }, "profile_negative_values" : { "description" : "Verifies that the number of negative values in a column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnNegativeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNegativeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNegativeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNegativeCountCheckSpec" }, "profile_negative_values_percent" : { "description" : "Verifies that the percentage of negative values in a column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnNegativePercentCheckSpec", - 
"originalRef" : "#/components/schemas/ColumnNegativePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNegativePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNegativePercentCheckSpec" }, "profile_number_below_min_value_percent" : { "description" : "The check counts the percentage of values in the column that are below the value defined by the user as a parameter.", - "$ref" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberBelowMinValuePercentCheckSpec" }, "profile_number_above_max_value_percent" : { "description" : "The check counts the percentage of values in the column that are above the value defined by the user as a parameter.", - "$ref" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberAboveMaxValuePercentCheckSpec" }, "profile_number_in_range_percent" : { "description" : "Verifies that the percentage of values from range in a column does not exceed the minimum accepted percentage.", - "$ref" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNumberInRangePercentCheckSpec" }, "profile_integer_in_range_percent" : { "description" : "Verifies that the percentage of values from range in a column does not exceed the minimum accepted percentage.", - "$ref" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnIntegerInRangePercentCheckSpec" }, "profile_min_in_range" : { "description" : "Verifies that the minimum value in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnMinInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMinInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMinInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMinInRangeCheckSpec" }, "profile_max_in_range" : { "description" : "Verifies that the maximum value in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnMaxInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMaxInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMaxInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMaxInRangeCheckSpec" }, "profile_sum_in_range" : { "description" : "Verifies that the sum of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnSumInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSumInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSumInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSumInRangeCheckSpec" }, "profile_mean_in_range" : { "description" : "Verifies that the average (mean) of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnMeanInRangeCheckSpec", - "originalRef" : 
"#/components/schemas/ColumnMeanInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMeanInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMeanInRangeCheckSpec" }, "profile_median_in_range" : { "description" : "Verifies that the median of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnMedianInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnMedianInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnMedianInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnMedianInRangeCheckSpec" }, "profile_percentile_in_range" : { "description" : "Verifies that the percentile of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnPercentileInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentileInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentileInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentileInRangeCheckSpec" }, "profile_percentile_10_in_range" : { "description" : "Verifies that the percentile 10 of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile10InRangeCheckSpec" }, "profile_percentile_25_in_range" : { "description" : "Verifies that the percentile 25 of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile25InRangeCheckSpec" }, "profile_percentile_75_in_range" : { "description" : "Verifies that the percentile 75 of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile75InRangeCheckSpec" }, "profile_percentile_90_in_range" : { "description" : "Verifies that the percentile 90 of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPercentile90InRangeCheckSpec" }, "profile_sample_stddev_in_range" : { "description" : "Verifies that the sample standard deviation of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSampleStddevInRangeCheckSpec" }, "profile_population_stddev_in_range" : { "description" : "Verifies that the population standard deviation of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec", - "originalRef" : 
"#/components/schemas/ColumnPopulationStddevInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPopulationStddevInRangeCheckSpec" }, "profile_sample_variance_in_range" : { "description" : "Verifies that the sample variance of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnSampleVarianceInRangeCheckSpec" }, "profile_population_variance_in_range" : { "description" : "Verifies that the population variance of all values in a column is not outside the expected range.", - "$ref" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec", + "$ref" : "#/components/schemas/ColumnPopulationVarianceInRangeCheckSpec" }, "profile_invalid_latitude" : { "description" : "Verifies that the number of invalid latitude values in a column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidLatitudeCountCheckSpec" }, "profile_valid_latitude_percent" : { "description" : "Verifies that the percentage of valid latitude values in a column does not fall below the minimum accepted percentage.", - "$ref" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnValidLatitudePercentCheckSpec" }, "profile_invalid_longitude" : { "description" : "Verifies that the number of invalid longitude values in a column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidLongitudeCountCheckSpec" }, "profile_valid_longitude_percent" : { "description" : "Verifies that the percentage of valid longitude values in a column does not fall below the minimum accepted percentage.", - "$ref" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnValidLongitudePercentCheckSpec" }, "profile_non_negative_values" : { "description" : "Verifies that the number of non-negative values in a column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnNonNegativeCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnNonNegativeCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnNonNegativeCountCheckSpec", + "$ref" : "#/components/schemas/ColumnNonNegativeCountCheckSpec" }, "profile_non_negative_values_percent" : { "description" : "Verifies that the 
percentage of non-negative values in a column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnNonNegativePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnNonNegativePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnNonNegativePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnNonNegativePercentCheckSpec" } } } @@ -54923,13 +54923,13 @@ "properties" : { "daily" : { "description" : "Configuration of day partitioned data quality checks evaluated at a column level.", - "$ref" : "#/components/schemas/ColumnDailyPartitionedCheckCategoriesSpec", - "originalRef" : "#/components/schemas/ColumnDailyPartitionedCheckCategoriesSpec" + "originalRef" : "#/components/schemas/ColumnDailyPartitionedCheckCategoriesSpec", + "$ref" : "#/components/schemas/ColumnDailyPartitionedCheckCategoriesSpec" }, "monthly" : { "description" : "Configuration of monthly partitioned data quality checks evaluated at a column level.", - "$ref" : "#/components/schemas/ColumnMonthlyPartitionedCheckCategoriesSpec", - "originalRef" : "#/components/schemas/ColumnMonthlyPartitionedCheckCategoriesSpec" + "originalRef" : "#/components/schemas/ColumnMonthlyPartitionedCheckCategoriesSpec", + "$ref" : "#/components/schemas/ColumnMonthlyPartitionedCheckCategoriesSpec" } } } @@ -55038,84 +55038,84 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_text_not_matching_regex_found" : { "description" : "Verifies that the number of text values not matching the custom regular expression pattern does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec" }, "daily_texts_not_matching_regex_percent" : { "description" : "Verifies that the percentage of strings not matching the custom regular expression pattern does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec" }, "daily_invalid_email_format_found" : { "description" : "Verifies that the number of invalid emails in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec" }, "daily_invalid_email_format_percent" : { "description" : "Verifies that the percentage of invalid emails in a text column does not exceed the maximum accepted percentage.", - "$ref" : 
"#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec" }, "daily_text_not_matching_date_pattern_found" : { "description" : "Verifies that the number of texts not matching the date format regular expression does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec" }, "daily_text_not_matching_date_pattern_percent" : { "description" : "Verifies that the percentage of texts not matching the date format regular expression in a column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec" }, "daily_text_not_matching_name_pattern_percent" : { "description" : "Verifies that the percentage of texts not matching the name regular expression does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec" }, "daily_invalid_uuid_format_found" : { "description" : "Verifies that the number of invalid UUIDs in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec" }, "daily_invalid_uuid_format_percent" : { "description" : "Verifies that the percentage of invalid UUID in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec" }, "daily_invalid_ip4_address_format_found" : { "description" : "Verifies that the number of invalid IP4 addresses in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec" }, "daily_invalid_ip6_address_format_found" : { "description" : "Verifies that the number of invalid IP6 addresses in a text column does 
not exceed the maximum accepted count.",
-         "$ref" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec",
+         "$ref" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec"
        },
        "daily_invalid_usa_phone_format_found" : {
          "description" : "Verifies that the number of invalid USA phone numbers in a text column does not exceed the maximum accepted count.",
-         "$ref" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec",
+         "$ref" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec"
        },
        "daily_invalid_usa_zipcode_format_found" : {
          "description" : "Verifies that the number of invalid zip codes in a text column does not exceed the maximum accepted count.",
-         "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec",
+         "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec"
        },
        "daily_invalid_usa_phone_format_percent" : {
          "description" : "Verifies that the percentage of invalid USA phone numbers in a text column does not exceed the maximum accepted percentage.",
-         "$ref" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec"
        },
        "daily_invalid_usa_zipcode_format_percent" : {
          "description" : "Verifies that the percentage of invalid USA zip codes in a text column does not exceed the maximum accepted percentage.",
-         "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec"
        }
      }
    }
@@ -55224,84 +55224,84 @@
        "type" : "object",
        "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section.
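The pattern checks in this category come in pairs: a `*_found` count of failing values and a `*_percent` share of failing values. The percent measure is simple arithmetic over a regular expression; a sketch with a deliberately simplified email pattern (the expression the actual DQOps sensor uses is more thorough):

```python
import re

# Deliberately simplified; the production sensor's email pattern is
# more thorough. This only illustrates the percent arithmetic.
SIMPLE_EMAIL = re.compile(r"^[^@\s]+@[^@\s]+\.[^@\s]+$")

def invalid_email_percent(values: list[str]) -> float:
    """Share of values failing the pattern, the measure shape behind
    daily_invalid_email_format_percent."""
    if not values:
        return 0.0
    invalid = sum(1 for v in values if not SIMPLE_EMAIL.match(v))
    return 100.0 * invalid / len(values)

# One of three values is invalid, so the measure is 33.33...; a rule
# with a maximum accepted percentage of 5 would raise an issue.
print(invalid_email_percent(["a@b.com", "not-an-email", "x@y.org"]))
```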
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.",
        "additionalProperties" : {
-           "$ref" : "#/definitions/CustomCheckSpec",
-           "originalRef" : "#/definitions/CustomCheckSpec"
+           "originalRef" : "#/definitions/CustomCheckSpec",
+           "$ref" : "#/definitions/CustomCheckSpec"
          }
        },
        "daily_partition_text_not_matching_regex_found" : {
          "description" : "Verifies that the number of text values not matching the custom regular expression pattern does not exceed the maximum accepted count.",
-         "$ref" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec",
+         "$ref" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec"
        },
        "daily_partition_texts_not_matching_regex_percent" : {
          "description" : "Verifies that the percentage of strings not matching the custom regular expression pattern does not exceed the maximum accepted percentage.",
-         "$ref" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec"
        },
        "daily_partition_invalid_email_format_found" : {
          "description" : "Verifies that the number of invalid emails in a text column does not exceed the maximum accepted count.",
-         "$ref" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec",
+         "$ref" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec"
        },
        "daily_partition_invalid_email_format_percent" : {
          "description" : "Verifies that the percentage of invalid emails in a text column does not exceed the maximum accepted percentage.",
-         "$ref" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec"
        },
        "daily_partition_text_not_matching_date_pattern_found" : {
          "description" : "Verifies that the number of texts not matching the date format regular expression does not exceed the maximum accepted count.",
-         "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec",
+         "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec"
        },
        "daily_partition_text_not_matching_date_pattern_percent" : {
          "description" : "Verifies that the percentage of texts not matching the date format regular expression in a column does not exceed the maximum accepted percentage.",
-         "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec",
-         "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec"
+         "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec",
+         "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec"
        },
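Note the direction of these `*_not_matching_*` checks: the measure is the share of values that fail the configured pattern, which is why the rule bounds it with a maximum accepted percentage. A minimal sketch, assuming a YYYY-MM-DD date format (the format regex is a configurable parameter in the real checks):

```python
import re

# An assumed YYYY-MM-DD format; the date pattern is a check parameter
# in DQOps, so real configurations may differ.
DATE_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2}$")

def not_matching_percent(values: list[str], pattern: re.Pattern) -> float:
    """Share of values that FAIL the pattern, the measure shape behind
    the *_not_matching_*_percent checks above."""
    if not values:
        return 0.0
    failing = sum(1 for v in values if not pattern.match(v))
    return 100.0 * failing / len(values)

# '27/10/2024' fails the assumed format, so the measure is 50.0.
print(not_matching_percent(["2024-10-27", "27/10/2024"], DATE_PATTERN))
```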
"daily_partition_text_not_matching_name_pattern_percent" : { "description" : "Verifies that the percentage of texts matching the name regular expression does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec" }, "daily_partition_invalid_uuid_format_found" : { "description" : "Verifies that the number of invalid UUIDs in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec" }, "daily_partition_invalid_uuid_format_percent" : { "description" : "Verifies that the percentage of invalid UUID in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec" }, "daily_partition_invalid_ip4_address_format_found" : { "description" : "Verifies that the number of invalid IP4 addresses in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec" }, "daily_partition_invalid_ip6_address_format_found" : { "description" : "Verifies that the number of invalid IP6 addresses in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec" }, "daily_partition_invalid_usa_phone_format_found" : { "description" : "Verifies that the number of invalid USA phone numbers in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec" }, "daily_partition_invalid_usa_zipcode_format_found" : { "description" : "Verifies that the number of invalid zip codes in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec" }, 
"daily_partition_invalid_usa_phone_format_percent" : { "description" : "Verifies that the percentage of invalid USA phones number in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec" }, "daily_partition_invalid_usa_zipcode_format_percent" : { "description" : "Verifies that the percentage of invalid USA phones number in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec" } } } @@ -55640,84 +55640,84 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_text_not_matching_regex_found" : { "description" : "Verifies that the number of text values not matching the custom regular expression pattern does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec" }, "monthly_texts_not_matching_regex_percent" : { "description" : "Verifies that the percentage of strings not matching the custom regular expression pattern does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec" }, "monthly_invalid_email_format_found" : { "description" : "Verifies that the number of invalid emails in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec" }, "monthly_invalid_email_format_percent" : { "description" : "Verifies that the percentage of invalid emails in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec", + "$ref" : 
"#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec" }, "monthly_text_not_matching_date_pattern_found" : { "description" : "Verifies that the number of texts not matching the date format regular expression does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec" }, "monthly_text_not_matching_date_pattern_percent" : { "description" : "Verifies that the percentage of texts not matching the date format regular expression in a column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec" }, "monthly_text_not_matching_name_pattern_percent" : { "description" : "Verifies that the percentage of texts not matching the name regular expression does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec" }, "monthly_invalid_uuid_format_found" : { "description" : "Verifies that the number of invalid UUIDs in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec" }, "monthly_invalid_uuid_format_percent" : { "description" : "Verifies that the percentage of invalid UUID in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec" }, "monthly_invalid_ip4_address_format_found" : { "description" : "Verifies that the number of invalid IP4 addresses in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec" }, "monthly_invalid_ip6_address_format_found" : { "description" : "Verifies that the number of invalid IP6 addresses in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec" + "originalRef" : 
"#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec" }, "monthly_invalid_usa_phone_format_found" : { "description" : "Verifies that the number of invalid USA phone numbers in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec" }, "monthly_invalid_usa_zipcode_format_found" : { "description" : "Verifies that the number of invalid zip codes in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec" }, "monthly_invalid_usa_phone_format_percent" : { "description" : "Verifies that the percentage of invalid USA phones number in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec" }, "monthly_invalid_usa_zipcode_format_percent" : { "description" : "Verifies that the percentage of invalid USA zip code in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec" } } } @@ -55826,84 +55826,84 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_text_not_matching_regex_found" : { "description" : "Verifies that the number of text values not matching the custom regular expression pattern does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec" }, "monthly_partition_texts_not_matching_regex_percent" : { "description" : "Verifies that the percentage of strings not matching the custom regular expression pattern does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec" }, "monthly_partition_invalid_email_format_found" : { "description" : "Verifies that the number of invalid emails in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec" }, "monthly_partition_invalid_email_format_percent" : { "description" : "Verifies that the percentage of invalid emails in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec" }, "monthly_partition_text_not_matching_date_pattern_found" : { "description" : "Verifies that the number of texts not matching the date format regular expression does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec" }, "monthly_partition_text_not_matching_date_pattern_percent" : { "description" : "Verifies that the percentage of texts not matching the date format regular expression in a column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec" }, 
"monthly_partition_text_not_matching_name_pattern_percent" : { "description" : "Verifies that the percentage of texts matching the name regular expression does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec" }, "monthly_partition_invalid_uuid_format_found" : { "description" : "Verifies that the number of invalid UUIDs in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec" }, "monthly_partition_invalid_uuid_format_percent" : { "description" : "Verifies that the percentage of invalid UUID in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec" }, "monthly_partition_invalid_ip4_address_format_found" : { "description" : "Verifies that the number of invalid IP4 addresses in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec" }, "monthly_partition_invalid_ip6_address_format_found" : { "description" : "Verifies that the number of invalid IP6 addresses in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec" }, "monthly_partition_invalid_usa_phone_format_found" : { "description" : "Verifies that the number of invalid USA phone numbers in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec" }, "monthly_partition_invalid_usa_zipcode_format_found" : { "description" : "Verifies that the number of invalid zip codes in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec" }, 
"monthly_partition_invalid_usa_phone_format_percent" : { "description" : "Verifies that the percentage of invalid USA phones number in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec" }, "monthly_partition_invalid_usa_zipcode_format_percent" : { "description" : "Verifies that the percentage of invalid USA phones number in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec" } } } @@ -56012,84 +56012,84 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_text_not_matching_regex_found" : { "description" : "Verifies that the number of text values not matching the custom regular expression pattern does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingRegexFoundCheckSpec" }, "profile_texts_not_matching_regex_percent" : { "description" : "Verifies that the percentage of strings not matching the custom regular expression pattern does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextsNotMatchingRegexPercentCheckSpec" }, "profile_invalid_email_format_found" : { "description" : "Verifies that the number of invalid emails in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidEmailFormatFoundCheckSpec" }, "profile_invalid_email_format_percent" : { "description" : "Verifies that the percentage of invalid emails in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec", + "$ref" : 
"#/components/schemas/ColumnInvalidEmailFormatPercentCheckSpec" }, "profile_text_not_matching_date_pattern_found" : { "description" : "Verifies that the number of texts not matching the date format regular expression does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternFoundCheckSpec" }, "profile_text_not_matching_date_pattern_percent" : { "description" : "Verifies that the percentage of texts not matching the date format regular expression in a column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingDatePatternPercentCheckSpec" }, "profile_text_not_matching_name_pattern_percent" : { "description" : "Verifies that the percentage of texts not matching the name regular expression does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextNotMatchingNamePatternPercentCheckSpec" }, "profile_invalid_uuid_format_found" : { "description" : "Verifies that the number of invalid UUIDs in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUuidFormatFoundCheckSpec" }, "profile_invalid_uuid_format_percent" : { "description" : "Verifies that the percentage of invalid UUID in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUuidFormatPercentCheckSpec" }, "profile_invalid_ip4_address_format_found" : { "description" : "Verifies that the number of invalid IP4 addresses in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidIp4AddressFormatFoundCheckSpec" }, "profile_invalid_ip6_address_format_found" : { "description" : "Verifies that the number of invalid IP6 addresses in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec" + "originalRef" : 
"#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidIp6AddressFormatFoundCheckSpec" }, "profile_invalid_usa_phone_format_found" : { "description" : "Verifies that the number of invalid USA phone numbers in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaPhoneFoundCheckSpec" }, "profile_invalid_usa_zipcode_format_found" : { "description" : "Verifies that the number of invalid zip codes in a text column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodeFoundCheckSpec" }, "profile_invalid_usa_phone_format_percent" : { "description" : "Verifies that the percentage of invalid USA phones number in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaPhonePercentCheckSpec" }, "profile_invalid_usa_zipcode_format_percent" : { "description" : "Verifies that the percentage of invalid USA phones number in a text column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnInvalidUsaZipcodePercentCheckSpec" } } } @@ -56353,15 +56353,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -56398,23 +56398,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericPercentile10SensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericPercentile10SensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericPercentile10SensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericPercentile10SensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a percentile 10 in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -56523,15 +56523,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -56568,23 +56568,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericPercentile25SensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericPercentile25SensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericPercentile25SensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericPercentile25SensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a percentile 25 in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -56693,15 +56693,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -56738,23 +56738,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericPercentile75SensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericPercentile75SensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericPercentile75SensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericPercentile75SensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a percentile 75 in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -56863,15 +56863,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -56908,23 +56908,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericPercentile90SensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericPercentile90SensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericPercentile90SensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericPercentile90SensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a percentile 90 in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -57033,15 +57033,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -57078,23 +57078,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericPercentileSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericPercentileSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericPercentileSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericPercentileSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a percentile in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -57203,15 +57203,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -57248,23 +57248,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows that contains email values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -57396,15 +57396,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -57441,23 +57441,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows that contains IP4 values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -57589,15 +57589,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -57634,23 +57634,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows that contains IP6 values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -57782,15 +57782,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).",
         "items" : {
-          "$ref" : "#/components/schemas/CommentSpec",
-          "originalRef" : "#/components/schemas/CommentSpec"
+          "originalRef" : "#/components/schemas/CommentSpec",
+          "$ref" : "#/components/schemas/CommentSpec"
         }
       },
       "disabled" : {
@@ -57827,23 +57827,23 @@
       },
       "parameters" : {
         "description" : "Numerical value in range percent sensor parameters",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentSensorParametersSpec"
       },
       "warning" : {
         "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check",
-        "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec",
-        "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec"
+        "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec",
+        "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec"
       },
       "error" : {
         "description" : "Default alerting threshold for the maximum percentage of rows that contains a USA phone number in a column that raises a data quality error (alert).",
-        "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec",
-        "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec"
+        "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec",
+        "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec"
       },
       "fatal" : {
         "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem",
-        "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec",
-        "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec"
+        "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec",
+        "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec"
       }
     }
   }
@@ -57975,15 +57975,15 @@
     "properties" : {
       "schedule_override" : {
         "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.",
-        "$ref" : "#/components/schemas/CronScheduleSpec",
-        "originalRef" : "#/components/schemas/CronScheduleSpec"
+        "originalRef" : "#/components/schemas/CronScheduleSpec",
+        "$ref" : "#/components/schemas/CronScheduleSpec"
       },
       "comments" : {
         "type" : "array",
         "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).",
         "items" : {
-          "$ref" : "#/components/schemas/CommentSpec",
-          "originalRef" : "#/components/schemas/CommentSpec"
+          "originalRef" : "#/components/schemas/CommentSpec",
+          "$ref" : "#/components/schemas/CommentSpec"
         }
       },
       "disabled" : {
@@ -58020,23 +58020,23 @@
       },
       "parameters" : {
         "description" : "Numerical value in range percent sensor parameters",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentSensorParametersSpec"
       },
       "warning" : {
         "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check",
-        "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec",
-        "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec"
+        "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec",
+        "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec"
       },
       "error" : {
         "description" : "Default alerting threshold for the maximum percentage of rows that contains a USA zip code number in a column that raises a data quality error (alert).",
-        "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec",
-        "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec"
+        "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec",
+        "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec"
       },
       "fatal" : {
         "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem",
-        "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec",
-        "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec"
+        "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec",
+        "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec"
       }
     }
   }
@@ -58118,34 +58118,34 @@
         "type" : "object",
         "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.",
         "additionalProperties" : {
-          "$ref" : "#/definitions/CustomCheckSpec",
-          "originalRef" : "#/definitions/CustomCheckSpec"
+          "originalRef" : "#/definitions/CustomCheckSpec",
+          "$ref" : "#/definitions/CustomCheckSpec"
        }
      },
      "daily_contains_usa_phone_percent" : {
        "description" : "Detects USA phone numbers in text columns. Verifies that the percentage of rows that contains a USA phone number in a column does not exceed the maximum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec"
      },
      "daily_contains_email_percent" : {
        "description" : "Detects emails in text columns. Verifies that the percentage of rows that contains emails in a column does not exceed the maximum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec"
      },
      "daily_contains_usa_zipcode_percent" : {
        "description" : "Detects USA zip codes in text columns. Verifies that the percentage of rows that contains a USA zip code in a column does not exceed the maximum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec"
      },
      "daily_contains_ip4_percent" : {
        "description" : "Detects IP4 addresses in text columns. Verifies that the percentage of rows that contains IP4 address values in a column does not fall below the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec"
      },
      "daily_contains_ip6_percent" : {
        "description" : "Detects IP6 addresses in text columns. Verifies that the percentage of rows that contains valid IP6 address values in a column does not fall below the minimum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec"
      }
    }
  }
@@ -58204,34 +58204,34 @@
         "type" : "object",
         "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.",
         "additionalProperties" : {
-          "$ref" : "#/definitions/CustomCheckSpec",
-          "originalRef" : "#/definitions/CustomCheckSpec"
+          "originalRef" : "#/definitions/CustomCheckSpec",
+          "$ref" : "#/definitions/CustomCheckSpec"
        }
      },
      "daily_partition_contains_usa_phone_percent" : {
        "description" : "Detects USA phone numbers in text columns. Verifies that the percentage of rows that contains USA phone number in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each daily partition.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec"
      },
      "daily_partition_contains_email_percent" : {
        "description" : "Detects emails in text columns. Verifies that the percentage of rows that contains emails in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each daily partition.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec"
      },
      "daily_partition_contains_usa_zipcode_percent" : {
        "description" : "Detects USA zip codes in text columns. Verifies that the percentage of rows that contains USA zip code in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each daily partition.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec"
      },
      "daily_partition_contains_ip4_percent" : {
        "description" : "Detects IP4 addresses in text columns. Verifies that the percentage of rows that contains IP4 address values in a column does not fall below the minimum accepted percentage. Stores a separate data quality check result for each daily partition.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec"
      },
      "daily_partition_contains_ip6_percent" : {
        "description" : "Detects IP6 addresses in text columns. Verifies that the percentage of rows that contains valid IP6 address values in a column does not fall below the minimum accepted percentage. Stores a separate data quality check result for each daily partition.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec"
      }
    }
  }
@@ -58290,34 +58290,34 @@
         "type" : "object",
         "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.",
         "additionalProperties" : {
-          "$ref" : "#/definitions/CustomCheckSpec",
-          "originalRef" : "#/definitions/CustomCheckSpec"
+          "originalRef" : "#/definitions/CustomCheckSpec",
+          "$ref" : "#/definitions/CustomCheckSpec"
        }
      },
      "monthly_contains_usa_phone_percent" : {
        "description" : "Detects USA phone numbers in text columns. Verifies that the percentage of rows that contains a USA phone number in a column does not exceed the maximum accepted percentage. Stores the most recent check result for each month when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec"
      },
      "monthly_contains_email_percent" : {
        "description" : "Detects emails in text columns. Verifies that the percentage of rows that contains emails in a column does not exceed the maximum accepted percentage. Stores the most recent check result for each month when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec"
      },
      "monthly_contains_usa_zipcode_percent" : {
        "description" : "Detects USA zip codes in text columns. Verifies that the percentage of rows that contains a USA zip code in a column does not exceed the maximum accepted percentage. Stores the most recent check result for each month when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec"
      },
      "monthly_contains_ip4_percent" : {
        "description" : "Detects IP4 addresses in text columns. Verifies that the percentage of rows that contains IP4 address values in a column does not fall below the minimum accepted percentage. Stores the most recent check result for each month when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec"
      },
      "monthly_contains_ip6_percent" : {
        "description" : "Detects IP6 addresses in text columns. Verifies that the percentage of rows that contains valid IP6 address values in a column does not fall below the minimum accepted percentage. Stores the most recent check result for each month when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec"
      }
    }
  }
@@ -58376,34 +58376,34 @@
         "type" : "object",
         "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.",
         "additionalProperties" : {
-          "$ref" : "#/definitions/CustomCheckSpec",
-          "originalRef" : "#/definitions/CustomCheckSpec"
+          "originalRef" : "#/definitions/CustomCheckSpec",
+          "$ref" : "#/definitions/CustomCheckSpec"
        }
      },
      "monthly_partition_contains_usa_phone_percent" : {
        "description" : "Detects USA phone numbers in text columns. Verifies that the percentage of rows that contains USA phone number in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each monthly partition.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec"
      },
      "monthly_partition_contains_email_percent" : {
        "description" : "Detects emails in text columns. Verifies that the percentage of rows that contains emails in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each monthly partition.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec"
      },
      "monthly_partition_contains_usa_zipcode_percent" : {
        "description" : "Detects USA zip codes in text columns. Verifies that the percentage of rows that contains USA zip code in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each monthly partition.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec"
      },
      "monthly_partition_contains_ip4_percent" : {
        "description" : "Detects IP4 addresses in text columns. Verifies that the percentage of rows that contains IP4 address values in a column does not fall below the minimum accepted percentage. Stores a separate data quality check result for each monthly partition.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec"
      },
      "monthly_partition_contains_ip6_percent" : {
        "description" : "Detects IP6 addresses in text columns. Verifies that the percentage of rows that contains valid IP6 address values in a column does not fall below the minimum accepted percentage. Stores a separate data quality check result for each monthly partition.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec"
      }
    }
  }
@@ -58462,34 +58462,34 @@
         "type" : "object",
         "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.",
         "additionalProperties" : {
-          "$ref" : "#/definitions/CustomCheckSpec",
-          "originalRef" : "#/definitions/CustomCheckSpec"
+          "originalRef" : "#/definitions/CustomCheckSpec",
+          "$ref" : "#/definitions/CustomCheckSpec"
        }
      },
      "profile_contains_usa_phone_percent" : {
        "description" : "Detects USA phone numbers in text columns. Verifies that the percentage of rows that contains USA phone number in a column does not exceed the maximum accepted percentage.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaPhonePercentCheckSpec"
      },
      "profile_contains_email_percent" : {
        "description" : "Detects emails in text columns. Verifies that the percentage of rows that contains valid emails in a column does not exceed the maximum accepted percentage.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsEmailPercentCheckSpec"
      },
      "profile_contains_usa_zipcode_percent" : {
        "description" : "Detects USA zip codes in text columns. Verifies that the percentage of rows that contains USA zip code in a column does not exceed the maximum accepted percentage.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsUsaZipcodePercentCheckSpec"
      },
      "profile_contains_ip4_percent" : {
        "description" : "Detects IP4 addresses in text columns. Verifies that the percentage of rows that contains valid IP4 address values in a column does not fall below the minimum accepted percentage.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsIp4PercentCheckSpec"
      },
      "profile_contains_ip6_percent" : {
        "description" : "Detects IP6 addresses in text columns. Verifies that the percentage of rows that contains valid IP6 address values in a column does not fall below the minimum accepted percentage.",
-        "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec",
+        "$ref" : "#/components/schemas/ColumnPiiContainsIp6PercentCheckSpec"
      }
    }
  }
@@ -58598,15 +58598,15 @@
     "properties" : {
       "schedule_override" : {
         "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.",
-        "$ref" : "#/components/schemas/CronScheduleSpec",
-        "originalRef" : "#/components/schemas/CronScheduleSpec"
+        "originalRef" : "#/components/schemas/CronScheduleSpec",
+        "$ref" : "#/components/schemas/CronScheduleSpec"
       },
       "comments" : {
         "type" : "array",
         "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).",
         "items" : {
-          "$ref" : "#/components/schemas/CommentSpec",
-          "originalRef" : "#/components/schemas/CommentSpec"
+          "originalRef" : "#/components/schemas/CommentSpec",
+          "$ref" : "#/components/schemas/CommentSpec"
         }
       },
       "disabled" : {
@@ -58643,23 +58643,23 @@
       },
       "parameters" : {
         "description" : "Data quality check parameters",
-        "$ref" : "#/components/schemas/ColumnNumericPopulationStddevSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnNumericPopulationStddevSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnNumericPopulationStddevSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnNumericPopulationStddevSensorParametersSpec"
       },
       "warning" : {
         "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      },
      "error" : {
        "description" : "Default alerting threshold for a population (biased) standard deviation in range in a column that raises a data quality error (alert).",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      },
      "fatal" : {
        "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      }
    }
  }
@@ -58768,15 +58768,15 @@
     "properties" : {
       "schedule_override" : {
         "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.",
-        "$ref" : "#/components/schemas/CronScheduleSpec",
-        "originalRef" : "#/components/schemas/CronScheduleSpec"
+        "originalRef" : "#/components/schemas/CronScheduleSpec",
+        "$ref" : "#/components/schemas/CronScheduleSpec"
       },
       "comments" : {
         "type" : "array",
         "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).",
         "items" : {
-          "$ref" : "#/components/schemas/CommentSpec",
-          "originalRef" : "#/components/schemas/CommentSpec"
+          "originalRef" : "#/components/schemas/CommentSpec",
+          "$ref" : "#/components/schemas/CommentSpec"
         }
       },
       "disabled" : {
@@ -58813,23 +58813,23 @@
       },
       "parameters" : {
         "description" : "Data quality check parameters",
-        "$ref" : "#/components/schemas/ColumnNumericPopulationVarianceSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnNumericPopulationVarianceSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnNumericPopulationVarianceSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnNumericPopulationVarianceSensorParametersSpec"
      },
      "warning" : {
        "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      },
      "error" : {
        "description" : "Default alerting threshold for a population (biased) variance in range in a column that raises a data quality error (alert).",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      },
      "fatal" : {
        "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      }
    }
  }
@@ -58964,101 +58964,101 @@
         "type" : "object",
         "description" : "Dictionary of custom checks. The keys are check names within this category.",
         "additionalProperties" : {
-          "$ref" : "#/definitions/CustomCheckSpec",
-          "originalRef" : "#/definitions/CustomCheckSpec"
+          "originalRef" : "#/definitions/CustomCheckSpec",
+          "$ref" : "#/definitions/CustomCheckSpec"
        }
      },
      "nulls" : {
        "description" : "Configuration of column level checks that detect null values.",
-        "$ref" : "#/components/schemas/ColumnNullsProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnNullsProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnNullsProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnNullsProfilingChecksSpec"
      },
      "uniqueness" : {
        "description" : "Configuration of uniqueness checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnUniquenessProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnUniquenessProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnUniquenessProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnUniquenessProfilingChecksSpec"
      },
      "accepted_values" : {
        "description" : "Configuration of accepted values checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnAcceptedValuesProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnAcceptedValuesProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnAcceptedValuesProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnAcceptedValuesProfilingChecksSpec"
      },
      "text" : {
        "description" : "Configuration of column level checks that verify text values.",
-        "$ref" : "#/components/schemas/ColumnTextProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnTextProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnTextProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnTextProfilingChecksSpec"
      },
      "whitespace" : {
        "description" : "Configuration of column level checks that detect blank and whitespace values.",
-        "$ref" : "#/components/schemas/ColumnWhitespaceProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnWhitespaceProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnWhitespaceProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnWhitespaceProfilingChecksSpec"
      },
      "conversions" : {
        "description" : "Configuration of conversion testing checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnConversionsProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnConversionsProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnConversionsProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnConversionsProfilingChecksSpec"
      },
      "patterns" : {
        "description" : "Configuration of pattern match checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnPatternsProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnPatternsProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnPatternsProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnPatternsProfilingChecksSpec"
      },
      "pii" : {
        "description" : "Configuration of Personal Identifiable Information (PII) checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnPiiProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnPiiProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnPiiProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnPiiProfilingChecksSpec"
      },
      "numeric" : {
        "description" : "Configuration of column level checks that verify numeric values.",
-        "$ref" : "#/components/schemas/ColumnNumericProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnNumericProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnNumericProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnNumericProfilingChecksSpec"
      },
      "anomaly" : {
        "description" : "Configuration of anomaly checks on a column level that detect anomalies in numeric columns.",
-        "$ref" : "#/components/schemas/ColumnAnomalyProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnAnomalyProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnAnomalyProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnAnomalyProfilingChecksSpec"
      },
      "datetime" : {
        "description" : "Configuration of datetime checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnDatetimeProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnDatetimeProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnDatetimeProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnDatetimeProfilingChecksSpec"
      },
      "bool" : {
        "description" : "Configuration of boolean checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnBoolProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnBoolProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnBoolProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnBoolProfilingChecksSpec"
      },
      "integrity" : {
        "description" : "Configuration of integrity checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnIntegrityProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnIntegrityProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnIntegrityProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnIntegrityProfilingChecksSpec"
      },
      "accuracy" : {
        "description" : "Configuration of accuracy checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnAccuracyProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnAccuracyProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnAccuracyProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnAccuracyProfilingChecksSpec"
      },
      "custom_sql" : {
        "description" : "Configuration of SQL checks that use custom SQL aggregated expressions and SQL conditions in data quality checks.",
-        "$ref" : "#/components/schemas/ColumnCustomSqlProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnCustomSqlProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnCustomSqlProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnCustomSqlProfilingChecksSpec"
      },
      "datatype" : {
        "description" : "Configuration of datatype checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnDatatypeProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnDatatypeProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnDatatypeProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnDatatypeProfilingChecksSpec"
      },
      "schema" : {
        "description" : "Configuration of schema checks on a column level.",
-        "$ref" : "#/components/schemas/ColumnSchemaProfilingChecksSpec",
-        "originalRef" : "#/components/schemas/ColumnSchemaProfilingChecksSpec"
+        "originalRef" : "#/components/schemas/ColumnSchemaProfilingChecksSpec",
+        "$ref" : "#/components/schemas/ColumnSchemaProfilingChecksSpec"
      },
      "comparisons" : {
        "type" : "object",
        "description" : "Dictionary of configuration of checks for table comparisons at a column level. The key that identifies each comparison must match the name of a data comparison that is configured on the parent table.",
        "additionalProperties" : {
-          "$ref" : "#/definitions/ColumnComparisonProfilingChecksSpec",
-          "originalRef" : "#/definitions/ColumnComparisonProfilingChecksSpec"
+          "originalRef" : "#/definitions/ColumnComparisonProfilingChecksSpec",
+          "$ref" : "#/definitions/ColumnComparisonProfilingChecksSpec"
        }
      }
    }
@@ -59141,8 +59141,8 @@
      },
      "target_column" : {
        "description" : "The filters for the target column.",
-        "$ref" : "#/components/schemas/TargetColumnPatternSpec",
-        "originalRef" : "#/components/schemas/TargetColumnPatternSpec"
+        "originalRef" : "#/components/schemas/TargetColumnPatternSpec",
+        "$ref" : "#/components/schemas/TargetColumnPatternSpec"
      },
      "can_edit" : {
        "type" : "boolean",
@@ -59198,8 +59198,8 @@
      },
      "policy_spec" : {
        "description" : "The quality policy specification.",
-        "$ref" : "#/components/schemas/ColumnQualityPolicySpec",
-        "originalRef" : "#/components/schemas/ColumnQualityPolicySpec"
+        "originalRef" : "#/components/schemas/ColumnQualityPolicySpec",
+        "$ref" : "#/components/schemas/ColumnQualityPolicySpec"
      },
      "can_edit" : {
        "type" : "boolean",
@@ -59279,23 +59279,23 @@
      },
      "target" : {
        "description" : "The target column filters that filter the column, table and connection on which the default checks are applied.",
-        "$ref" : "#/components/schemas/TargetColumnPatternSpec",
-        "originalRef" : "#/components/schemas/TargetColumnPatternSpec"
+        "originalRef" : "#/components/schemas/TargetColumnPatternSpec",
+        "$ref" : "#/components/schemas/TargetColumnPatternSpec"
      },
      "profiling_checks" : {
        "description" : "Configuration of data quality profiling checks that are enabled. Pick a check from a category, apply the parameters and rules to enable it.",
-        "$ref" : "#/components/schemas/ColumnProfilingCheckCategoriesSpec",
-        "originalRef" : "#/components/schemas/ColumnProfilingCheckCategoriesSpec"
+        "originalRef" : "#/components/schemas/ColumnProfilingCheckCategoriesSpec",
+        "$ref" : "#/components/schemas/ColumnProfilingCheckCategoriesSpec"
      },
      "monitoring_checks" : {
        "description" : "Configuration of table level monitoring checks. Monitoring checks are data quality checks that are evaluated for each period of time (daily, weekly, monthly, etc.). A monitoring check stores only the most recent data quality check result for each period of time.",
-        "$ref" : "#/components/schemas/ColumnMonitoringCheckCategoriesSpec",
-        "originalRef" : "#/components/schemas/ColumnMonitoringCheckCategoriesSpec"
+        "originalRef" : "#/components/schemas/ColumnMonitoringCheckCategoriesSpec",
+        "$ref" : "#/components/schemas/ColumnMonitoringCheckCategoriesSpec"
      },
      "partitioned_checks" : {
        "description" : "Configuration of table level date/time partitioned checks. Partitioned data quality checks are evaluated for each partition separately, raising separate alerts at a partition level. The table does not need to be physically partitioned by date, it is possible to run data quality checks for each day or month of data separately.",
-        "$ref" : "#/components/schemas/ColumnPartitionedCheckCategoriesSpec",
-        "originalRef" : "#/components/schemas/ColumnPartitionedCheckCategoriesSpec"
+        "originalRef" : "#/components/schemas/ColumnPartitionedCheckCategoriesSpec",
+        "$ref" : "#/components/schemas/ColumnPartitionedCheckCategoriesSpec"
      }
    }
  }
@@ -59350,8 +59350,8 @@
      },
      "parameters" : {
        "description" : "Profiler parameters",
-        "$ref" : "#/components/schemas/ColumnRangeMaxValueSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnRangeMaxValueSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnRangeMaxValueSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnRangeMaxValueSensorParametersSpec"
      }
    }
  }
@@ -59383,8 +59383,8 @@
      },
      "parameters" : {
        "description" : "Profiler parameters",
-        "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnNumericMeanSensorParametersSpec"
      }
    }
  }
@@ -59416,8 +59416,8 @@
      },
      "parameters" : {
        "description" : "Profiler parameters",
-        "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnNumericMedianSensorParametersSpec"
      }
    }
  }
@@ -59472,8 +59472,8 @@
      },
      "parameters" : {
        "description" : "Profiler parameters",
-        "$ref" : "#/components/schemas/ColumnRangeMinValueSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnRangeMinValueSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnRangeMinValueSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnRangeMinValueSensorParametersSpec"
      }
    }
  }
@@ -59514,28 +59514,28 @@
    "properties" : {
      "min_value" : {
        "description" : "Configuration of the profiler that finds the minimum value in the column.",
-        "$ref" : "#/components/schemas/ColumnRangeMinValueStatisticsCollectorSpec",
-        "originalRef" : "#/components/schemas/ColumnRangeMinValueStatisticsCollectorSpec"
+        "originalRef" : "#/components/schemas/ColumnRangeMinValueStatisticsCollectorSpec",
+        "$ref" : "#/components/schemas/ColumnRangeMinValueStatisticsCollectorSpec"
      },
      "median_value" : {
        "description" : "Configuration of the profiler that finds the median value in the column.",
-        "$ref" : "#/components/schemas/ColumnRangeMedianValueStatisticsCollectorSpec",
-        "originalRef" : "#/components/schemas/ColumnRangeMedianValueStatisticsCollectorSpec"
+        "originalRef" : "#/components/schemas/ColumnRangeMedianValueStatisticsCollectorSpec",
+        "$ref" : "#/components/schemas/ColumnRangeMedianValueStatisticsCollectorSpec"
      },
      "max_value" : {
        "description" : "Configuration of the profiler that finds the maximum value in the column.",
-        "$ref" : "#/components/schemas/ColumnRangeMaxValueStatisticsCollectorSpec",
-        "originalRef" : "#/components/schemas/ColumnRangeMaxValueStatisticsCollectorSpec"
+        "originalRef" : "#/components/schemas/ColumnRangeMaxValueStatisticsCollectorSpec",
+        "$ref" : "#/components/schemas/ColumnRangeMaxValueStatisticsCollectorSpec"
      },
      "mean_value" : {
        "description" : "Configuration of the profiler that finds the mean value in the column.",
-        "$ref" : "#/components/schemas/ColumnRangeMeanValueStatisticsCollectorSpec",
-        "originalRef" : "#/components/schemas/ColumnRangeMeanValueStatisticsCollectorSpec"
+        "originalRef" : "#/components/schemas/ColumnRangeMeanValueStatisticsCollectorSpec",
+        "$ref" : "#/components/schemas/ColumnRangeMeanValueStatisticsCollectorSpec"
      },
      "sum_value" : {
        "description" : "Configuration of the profiler that finds the sum value in the column.",
-        "$ref" : "#/components/schemas/ColumnRangeSumValueStatisticsCollectorSpec",
-        "originalRef" : "#/components/schemas/ColumnRangeSumValueStatisticsCollectorSpec"
+        "originalRef" : "#/components/schemas/ColumnRangeSumValueStatisticsCollectorSpec",
+        "$ref" : "#/components/schemas/ColumnRangeSumValueStatisticsCollectorSpec"
      }
    }
  }
@@ -59567,8 +59567,8 @@
      },
      "parameters" : {
        "description" : "Profiler parameters",
-        "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec"
      }
    }
  }
@@ -59677,15 +59677,15 @@
    "properties" : {
      "schedule_override" : {
        "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.",
-        "$ref" : "#/components/schemas/CronScheduleSpec",
-        "originalRef" : "#/components/schemas/CronScheduleSpec"
+        "originalRef" : "#/components/schemas/CronScheduleSpec",
+        "$ref" : "#/components/schemas/CronScheduleSpec"
      },
      "comments" : {
        "type" : "array",
        "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).",
        "items" : {
-          "$ref" : "#/components/schemas/CommentSpec",
-          "originalRef" : "#/components/schemas/CommentSpec"
+          "originalRef" : "#/components/schemas/CommentSpec",
+          "$ref" : "#/components/schemas/CommentSpec"
        }
      },
      "disabled" : {
@@ -59722,23 +59722,23 @@
      },
      "parameters" : {
        "description" : "Data quality check parameters",
-        "$ref" : "#/components/schemas/ColumnNumericSampleStddevSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnNumericSampleStddevSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnNumericSampleStddevSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnNumericSampleStddevSensorParametersSpec"
      },
      "warning" : {
        "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      },
      "error" : {
        "description" : "Default alerting threshold for a sample (unbiased) standard deviation in range in a column that raises a data quality error (alert).",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      },
      "fatal" : {
        "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      }
    }
  }
@@ -59847,15 +59847,15 @@
    "properties" : {
      "schedule_override" : {
        "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.",
-        "$ref" : "#/components/schemas/CronScheduleSpec",
-        "originalRef" : "#/components/schemas/CronScheduleSpec"
+        "originalRef" : "#/components/schemas/CronScheduleSpec",
+        "$ref" : "#/components/schemas/CronScheduleSpec"
      },
      "comments" : {
        "type" : "array",
        "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).",
        "items" : {
-          "$ref" : "#/components/schemas/CommentSpec",
-          "originalRef" : "#/components/schemas/CommentSpec"
+          "originalRef" : "#/components/schemas/CommentSpec",
+          "$ref" : "#/components/schemas/CommentSpec"
        }
      },
      "disabled" : {
@@ -59892,23 +59892,23 @@
      },
      "parameters" : {
        "description" : "Data quality check parameters",
-        "$ref" : "#/components/schemas/ColumnNumericSampleVarianceSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnNumericSampleVarianceSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnNumericSampleVarianceSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnNumericSampleVarianceSensorParametersSpec"
      },
      "warning" : {
        "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      },
      "error" : {
        "description" : "Default alerting threshold for a sample (unbiased) variance in range in a column that raises a data quality error (alert).",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      },
      "fatal" : {
        "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem",
-        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
-        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
+        "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec",
+        "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec"
      }
    }
  }
@@ -59976,8 +59976,8 @@
      },
      "parameters" : {
        "description" : "Profiler parameters",
-        "$ref" : "#/components/schemas/ColumnSamplingColumnSamplesSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnSamplingColumnSamplesSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnSamplingColumnSamplesSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnSamplingColumnSamplesSensorParametersSpec"
      }
    }
  }
@@ -59998,8 +59998,8 @@
    "properties" : {
      "column_samples" : {
        "description" : "Configuration of the profiler that finds the maximum string length.",
-        "$ref" : "#/components/schemas/ColumnSamplingColumnSamplesStatisticsCollectorSpec",
-        "originalRef" : "#/components/schemas/ColumnSamplingColumnSamplesStatisticsCollectorSpec"
+        "originalRef" : "#/components/schemas/ColumnSamplingColumnSamplesStatisticsCollectorSpec",
+        "$ref" : "#/components/schemas/ColumnSamplingColumnSamplesStatisticsCollectorSpec"
      }
    }
  }
@@ -60108,15 +60108,15 @@
    "properties" : {
      "schedule_override" : {
        "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.",
-        "$ref" : "#/components/schemas/CronScheduleSpec",
-        "originalRef" : "#/components/schemas/CronScheduleSpec"
+        "originalRef" : "#/components/schemas/CronScheduleSpec",
+        "$ref" : "#/components/schemas/CronScheduleSpec"
      },
      "comments" : {
        "type" : "array",
        "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).",
        "items" : {
-          "$ref" : "#/components/schemas/CommentSpec",
-          "originalRef" : "#/components/schemas/CommentSpec"
+          "originalRef" : "#/components/schemas/CommentSpec",
+          "$ref" : "#/components/schemas/CommentSpec"
        }
      },
      "disabled" : {
@@ -60153,23 +60153,23 @@
      },
      "parameters" : {
        "description" : "Data quality check parameters for a column exists sensor",
-        "$ref" : "#/components/schemas/ColumnColumnExistsSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnColumnExistsSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnColumnExistsSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnColumnExistsSensorParametersSpec"
      },
      "warning" : {
        "description" : "Alerting threshold that raises a data quality warning when the column was not found.",
-        "$ref" : "#/components/schemas/Equals1RuleParametersSpec",
-        "originalRef" : "#/components/schemas/Equals1RuleParametersSpec"
+        "originalRef" : "#/components/schemas/Equals1RuleParametersSpec",
+        "$ref" : "#/components/schemas/Equals1RuleParametersSpec"
      },
      "error" : {
        "description" : "Alerting threshold that raises a data quality error when the column was not found.",
-        "$ref" : "#/components/schemas/Equals1RuleParametersSpec",
-        "originalRef" : "#/components/schemas/Equals1RuleParametersSpec"
+        "originalRef" : "#/components/schemas/Equals1RuleParametersSpec",
+        "$ref" : "#/components/schemas/Equals1RuleParametersSpec"
      },
      "fatal" : {
        "description" : "Alerting threshold that raises a data quality fatal issue when the column was not found.",
-        "$ref" : "#/components/schemas/Equals1RuleParametersSpec",
-        "originalRef" : "#/components/schemas/Equals1RuleParametersSpec"
+        "originalRef" : "#/components/schemas/Equals1RuleParametersSpec",
+        "$ref" : "#/components/schemas/Equals1RuleParametersSpec"
      }
    }
  }
@@ -60213,19 +60213,19 @@
        "type" : "object",
        "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.",
        "additionalProperties" : {
-          "$ref" : "#/definitions/CustomCheckSpec",
-          "originalRef" : "#/definitions/CustomCheckSpec"
+          "originalRef" : "#/definitions/CustomCheckSpec",
+          "$ref" : "#/definitions/CustomCheckSpec"
        }
      },
      "daily_column_exists" : {
        "description" : "Checks the metadata of the monitored table and verifies if the column exists. Stores the most recent value for each day when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec",
+        "$ref" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec"
      },
      "daily_column_type_changed" : {
        "description" : "Checks the metadata of the monitored column and detects if the data type (including the length, precision, scale, nullability) has changed since the last day. Stores the most recent hash for each day when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec",
+        "$ref" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec"
      }
    }
  }
@@ -60269,19 +60269,19 @@
        "type" : "object",
        "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.",
        "additionalProperties" : {
-          "$ref" : "#/definitions/CustomCheckSpec",
-          "originalRef" : "#/definitions/CustomCheckSpec"
+          "originalRef" : "#/definitions/CustomCheckSpec",
+          "$ref" : "#/definitions/CustomCheckSpec"
        }
      },
      "monthly_column_exists" : {
        "description" : "Checks the metadata of the monitored table and verifies if the column exists. Stores the most recent value for each month when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec",
+        "$ref" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec"
      },
      "monthly_column_type_changed" : {
        "description" : "Checks the metadata of the monitored column and detects if the data type (including the length, precision, scale, nullability) has changed since the last month. Stores the most recent hash for each month when the data quality check was evaluated.",
-        "$ref" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec",
+        "$ref" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec"
      }
    }
  }
@@ -60325,19 +60325,19 @@
        "type" : "object",
        "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.",
        "additionalProperties" : {
-          "$ref" : "#/definitions/CustomCheckSpec",
-          "originalRef" : "#/definitions/CustomCheckSpec"
+          "originalRef" : "#/definitions/CustomCheckSpec",
+          "$ref" : "#/definitions/CustomCheckSpec"
        }
      },
      "profile_column_exists" : {
        "description" : "Checks the metadata of the monitored table and verifies if the column exists.",
-        "$ref" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec",
+        "$ref" : "#/components/schemas/ColumnSchemaColumnExistsCheckSpec"
      },
      "profile_column_type_changed" : {
        "description" : "Checks the metadata of the monitored column and detects if the data type (including the length, precision, scale, nullability) has changed.",
-        "$ref" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec",
-        "originalRef" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec"
+        "originalRef" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec",
+        "$ref" : "#/components/schemas/ColumnSchemaTypeChangedCheckSpec"
      }
    }
  }
@@ -60446,15 +60446,15 @@
    "properties" : {
      "schedule_override" : {
        "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.",
-        "$ref" : "#/components/schemas/CronScheduleSpec",
-        "originalRef" : "#/components/schemas/CronScheduleSpec"
+        "originalRef" : "#/components/schemas/CronScheduleSpec",
+        "$ref" : "#/components/schemas/CronScheduleSpec"
      },
      "comments" : {
        "type" : "array",
        "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).",
        "items" : {
-          "$ref" : "#/components/schemas/CommentSpec",
-          "originalRef" : "#/components/schemas/CommentSpec"
+          "originalRef" : "#/components/schemas/CommentSpec",
+          "$ref" : "#/components/schemas/CommentSpec"
        }
      },
      "disabled" : {
@@ -60491,23 +60491,23 @@
      },
      "parameters" : {
        "description" : "Column data type hash sensor parameters",
-        "$ref" : "#/components/schemas/ColumnColumnTypeHashSensorParametersSpec",
-        "originalRef" : "#/components/schemas/ColumnColumnTypeHashSensorParametersSpec"
+        "originalRef" : "#/components/schemas/ColumnColumnTypeHashSensorParametersSpec",
+        "$ref" : "#/components/schemas/ColumnColumnTypeHashSensorParametersSpec"
      },
      "warning" : {
        "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check",
-        "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec",
-        "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec"
+        "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec",
+        "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec"
      },
      "error" : {
        "description" : "Default alerting threshold that raises a data quality issue at an error severity level",
-        "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec",
-        "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec"
+        "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec",
+        "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec"
      },
      "fatal" : {
        "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem",
- "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" } } } @@ -60619,8 +60619,8 @@ }, "type_snapshot" : { "description" : "Column data type that was retrieved when the table metadata was imported.", - "$ref" : "#/components/schemas/ColumnTypeSnapshotSpec", - "originalRef" : "#/components/schemas/ColumnTypeSnapshotSpec" + "originalRef" : "#/components/schemas/ColumnTypeSnapshotSpec", + "$ref" : "#/components/schemas/ColumnTypeSnapshotSpec" }, "id" : { "type" : "boolean", @@ -60628,23 +60628,23 @@ }, "profiling_checks" : { "description" : "Configuration of data quality profiling checks that are enabled. Pick a check from a category, apply the parameters and rules to enable it.", - "$ref" : "#/components/schemas/ColumnProfilingCheckCategoriesSpec", - "originalRef" : "#/components/schemas/ColumnProfilingCheckCategoriesSpec" + "originalRef" : "#/components/schemas/ColumnProfilingCheckCategoriesSpec", + "$ref" : "#/components/schemas/ColumnProfilingCheckCategoriesSpec" }, "monitoring_checks" : { "description" : "Configuration of column level monitoring checks. Monitoring are data quality checks that are evaluated for each period of time (daily, weekly, monthly, etc.). A monitoring stores only the most recent data quality check result for each period of time.", - "$ref" : "#/components/schemas/ColumnMonitoringCheckCategoriesSpec", - "originalRef" : "#/components/schemas/ColumnMonitoringCheckCategoriesSpec" + "originalRef" : "#/components/schemas/ColumnMonitoringCheckCategoriesSpec", + "$ref" : "#/components/schemas/ColumnMonitoringCheckCategoriesSpec" }, "partitioned_checks" : { "description" : "Configuration of column level date/time partitioned checks. Partitioned data quality checks are evaluated for each partition separately, raising separate alerts at a partition level. The table does not need to be physically partitioned by date, it is possible to run data quality checks for each day or month of data separately.", - "$ref" : "#/components/schemas/ColumnPartitionedCheckCategoriesSpec", - "originalRef" : "#/components/schemas/ColumnPartitionedCheckCategoriesSpec" + "originalRef" : "#/components/schemas/ColumnPartitionedCheckCategoriesSpec", + "$ref" : "#/components/schemas/ColumnPartitionedCheckCategoriesSpec" }, "statistics" : { "description" : "Custom configuration of a column level statistics collector (a basic profiler). Enables customization of the statistics collector settings when the collector is analysing this column.", - "$ref" : "#/components/schemas/ColumnStatisticsCollectorsRootCategoriesSpec", - "originalRef" : "#/components/schemas/ColumnStatisticsCollectorsRootCategoriesSpec" + "originalRef" : "#/components/schemas/ColumnStatisticsCollectorsRootCategoriesSpec", + "$ref" : "#/components/schemas/ColumnStatisticsCollectorsRootCategoriesSpec" }, "labels" : { "type" : "array", @@ -60657,8 +60657,8 @@ "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "advanced_properties" : { @@ -60775,15 +60775,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -60820,23 +60820,23 @@ }, "parameters" : { "description" : "Sensor parameters with the custom SQL aggregate expression that is evaluated on a column", - "$ref" : "#/components/schemas/ColumnSqlAggregatedExpressionSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnSqlAggregatedExpressionSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnSqlAggregatedExpressionSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnSqlAggregatedExpressionSensorParametersSpec" }, "warning" : { "description" : "Default alerting threshold for warnings raised when the aggregated value is above the maximum accepted value.", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for errors raised when the aggregated value is above the maximum accepted value.", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Default alerting threshold for fatal data quality issues raised when the aggregated value is above the maximum accepted value.", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -60979,15 +60979,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. 
Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -61024,23 +61024,23 @@ }, "parameters" : { "description" : "Sensor parameters with the custom SQL condition (an expression that returns true/false) which is evaluated on each row, using a {column} placeholder to reference the current column.", - "$ref" : "#/components/schemas/ColumnSqlConditionFailedCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionFailedCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionFailedCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning when a given number of rows failed the custom SQL condition (expression). The warning is considered as a passed data quality check.", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows failing the custom SQL condition (expression) that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue when a given number of rows failed the custom SQL condition (expression). A fatal issue indicates a serious data quality problem that should result in stopping the data pipelines.", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -61183,15 +61183,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -61228,23 +61228,23 @@ }, "parameters" : { "description" : "Sensor parameters with the custom SQL condition (an expression that returns true/false) which is evaluated on each row for the given column, using a {column} placeholder to reference the current column.", - "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnSqlConditionPassedPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnSqlConditionPassedPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning when the percentage of rows that passed the custom SQL condition (expression) is below the minimum acceptable percentage. The warning is considered as a passed data quality check.", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum acceptable percentage of rows passing the custom SQL condition (expression) that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue when the percentage of rows that passed the custom SQL condition (expression) is below the minimum acceptable percentage. A fatal issue indicates a serious data quality problem that should result in stopping the data pipelines.", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -61387,15 +61387,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -61432,23 +61432,23 @@ }, "parameters" : { "description" : "Sensor parameters with the custom SQL SELECT statement that queries a log table to get a result of a custom query that retrieves results from other data quality libraries.", - "$ref" : "#/components/schemas/ColumnSqlImportCustomResultSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnSqlImportCustomResultSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnSqlImportCustomResultSensorParametersSpec" }, "warning" : { "description" : "Warning severity import rule. Activate the rule with no parameters to import custom data quality results when the custom query returns a value **1** in the *severity* result column.", - "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec", - "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec" + "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec", + "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec" }, "error" : { "description" : "Error severity import rule. Activate the rule with no parameters to import custom data quality results when the custom query returns a value **2** in the *severity* result column.", - "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec", - "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec" + "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec", + "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec" }, "fatal" : { "description" : "Fatal severity import rule. 
Activate the rule with no parameters to import custom data quality results when the custom query returns a value **3** in the *severity* result column.", - "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec", - "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec" + "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec", + "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec" } } } @@ -61528,32 +61528,32 @@ "properties" : { "nulls" : { "description" : "Configuration of null values profilers on a column level.", - "$ref" : "#/components/schemas/ColumnNullsStatisticsCollectorsSpec", - "originalRef" : "#/components/schemas/ColumnNullsStatisticsCollectorsSpec" + "originalRef" : "#/components/schemas/ColumnNullsStatisticsCollectorsSpec", + "$ref" : "#/components/schemas/ColumnNullsStatisticsCollectorsSpec" }, "text" : { "description" : "Configuration of text column profilers on a column level.", - "$ref" : "#/components/schemas/ColumnTextStatisticsCollectorsSpec", - "originalRef" : "#/components/schemas/ColumnTextStatisticsCollectorsSpec" + "originalRef" : "#/components/schemas/ColumnTextStatisticsCollectorsSpec", + "$ref" : "#/components/schemas/ColumnTextStatisticsCollectorsSpec" }, "uniqueness" : { "description" : "Configuration of profilers that analyse uniqueness of values (distinct count).", - "$ref" : "#/components/schemas/ColumnUniquenessStatisticsCollectorsSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessStatisticsCollectorsSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessStatisticsCollectorsSpec", + "$ref" : "#/components/schemas/ColumnUniquenessStatisticsCollectorsSpec" }, "range" : { "description" : "Configuration of profilers that analyse the range of values (min, max).", - "$ref" : "#/components/schemas/ColumnRangeStatisticsCollectorsSpec", - "originalRef" : "#/components/schemas/ColumnRangeStatisticsCollectorsSpec" + "originalRef" : "#/components/schemas/ColumnRangeStatisticsCollectorsSpec", + "$ref" : "#/components/schemas/ColumnRangeStatisticsCollectorsSpec" }, "sampling" : { "description" : "Configuration of profilers that collect the column samples.", - "$ref" : "#/components/schemas/ColumnSamplingStatisticsCollectorsSpec", - "originalRef" : "#/components/schemas/ColumnSamplingStatisticsCollectorsSpec" + "originalRef" : "#/components/schemas/ColumnSamplingStatisticsCollectorsSpec", + "$ref" : "#/components/schemas/ColumnSamplingStatisticsCollectorsSpec" }, "strings" : { - "$ref" : "#/components/schemas/ColumnTextStatisticsCollectorsSpec", - "originalRef" : "#/components/schemas/ColumnTextStatisticsCollectorsSpec" + "originalRef" : "#/components/schemas/ColumnTextStatisticsCollectorsSpec", + "$ref" : "#/components/schemas/ColumnTextStatisticsCollectorsSpec" } } } @@ -61644,8 +61644,8 @@ }, "table" : { "description" : "Physical table name including the schema and table names.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "column_name" : { "type" : "string", @@ -61666,21 +61666,21 @@ }, "type_snapshot" : { "description" : "Column data type that was retrieved when the table metadata was imported.", - "$ref" : "#/components/schemas/ColumnTypeSnapshotSpec", - "originalRef" : "#/components/schemas/ColumnTypeSnapshotSpec" + "originalRef" : "#/components/schemas/ColumnTypeSnapshotSpec", + "$ref" : 
"#/components/schemas/ColumnTypeSnapshotSpec" }, "statistics" : { "type" : "array", "description" : "List of collected column statistics.", "items" : { - "$ref" : "#/components/schemas/StatisticsMetricModel", - "originalRef" : "#/components/schemas/StatisticsMetricModel" + "originalRef" : "#/components/schemas/StatisticsMetricModel", + "$ref" : "#/components/schemas/StatisticsMetricModel" } }, "collect_column_statistics_job_template" : { "description" : "Configured parameters for the \"collect statistics\" job that should be pushed to the job queue in order to run all statistics collectors for this column", - "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters", - "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters" + "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters", + "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters" }, "can_collect_statistics" : { "type" : "boolean", @@ -61893,15 +61893,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -61938,23 +61938,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue 
which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec" } } } @@ -62063,15 +62063,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -62108,23 +62108,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyStationaryPercentileMovingAverageRuleFatal01PctParametersSpec" 
} } } @@ -62233,15 +62233,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -62278,23 +62278,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" } } } @@ -62403,15 +62403,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -62448,23 +62448,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" } } } @@ -62573,15 +62573,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -62618,23 +62618,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" } } } @@ -62743,15 +62743,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -62788,23 +62788,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec" } } } @@ -62913,15 +62913,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -62958,23 +62958,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericSumSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericSumSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a sum in range in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -63058,59 +63058,59 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_text_min_length" : { "description" : "This check finds the length of the shortest text in a column. Then, it verifies that the minimum length is within an accepted range. It detects that the shortest text is too short. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextMinLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMinLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMinLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMinLengthCheckSpec" }, "daily_text_max_length" : { "description" : "This check finds the length of the longest text in a column. Then, it verifies that the maximum length is within an accepted range. It detects that the texts are too long or not long enough. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextMaxLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMaxLengthCheckSpec" }, "daily_text_mean_length" : { "description" : "Verifies that the mean (average) length of texts in a column is within an accepted range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextMeanLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMeanLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMeanLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMeanLengthCheckSpec" }, "daily_text_length_below_min_length" : { "description" : "The check counts the number of text values in the column that are below the length defined by the user as a parameter. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec" }, "daily_text_length_below_min_length_percent" : { "description" : "The check measures the percentage of text values in the column that are below the length defined by the user as a parameter. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec" }, "daily_text_length_above_max_length" : { "description" : "The check counts the number of text values in the column that are above the length defined by the user as a parameter. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec" }, "daily_text_length_above_max_length_percent" : { "description" : "The check measures the percentage of text values in the column that are above the length defined by the user as a parameter. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec" }, "daily_text_length_in_range_percent" : { "description" : "The check measures the percentage of text values in the column with a length in the range provided by the user. 
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec" }, "daily_min_word_count" : { "description" : "This check finds the lowest word count of text in a column. Then, it verifies that the minimum word count is within an accepted range. It detects that the text contains too few words.", - "$ref" : "#/components/schemas/ColumnTextMinWordCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMinWordCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMinWordCountCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMinWordCountCheckSpec" }, "daily_max_word_count" : { "description" : "This check finds the highest word count of text in a column. Then, it verifies that the maximum word count is within an accepted range. It detects that the text contains too many words.", - "$ref" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec" } } } @@ -63194,59 +63194,59 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_text_min_length" : { "description" : "This check finds the length of the shortest text in a column. Then, it verifies that the minimum length is within an accepted range. It detects that the shortest text is too short. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextMinLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMinLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMinLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMinLengthCheckSpec" }, "daily_partition_text_max_length" : { "description" : "This check finds the length of the longest text in a column. Then, it verifies that the maximum length is within an accepted range. It detects that the texts are too long or not long enough. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextMaxLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMaxLengthCheckSpec" }, "daily_partition_text_mean_length" : { "description" : "Verifies that the mean (average) length of texts in a column is within an accepted range. 
Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextMeanLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMeanLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMeanLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMeanLengthCheckSpec" }, "daily_partition_text_length_below_min_length" : { "description" : "The check counts the number of text values in the column that are below the length defined by the user as a parameter. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec" }, "daily_partition_text_length_below_min_length_percent" : { "description" : "The check measures the percentage of text values in the column that are below the length defined by the user as a parameter. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec" }, "daily_partition_text_length_above_max_length" : { "description" : "The check counts the number of text values in the column that are above the length defined by the user as a parameter. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec" }, "daily_partition_text_length_above_max_length_percent" : { "description" : "The check measures the percentage of text values in the column that are above the length defined by the user as a parameter. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec" }, "daily_partition_text_length_in_range_percent" : { "description" : "The check measures the percentage of text values in the column with a length in the range provided by the user. 
Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec" }, "daily_partition_min_word_count" : { "description" : "This check finds the lowest word count of text in a column. Then, it verifies that the minimum word count is within an accepted range. It detects that the text contains too few words.", - "$ref" : "#/components/schemas/ColumnTextMinWordCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMinWordCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMinWordCountCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMinWordCountCheckSpec" }, "daily_partition_max_word_count" : { "description" : "This check finds the highest word count of text in a column. Then, it verifies that the maximum word count is within an accepted range. It detects that the text contains too many words.", - "$ref" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec" } } } @@ -63355,15 +63355,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -63400,23 +63400,23 @@ }, "parameters" : { "description" : "Data quality check parameters that specify a list of expected values that are compared to the values in the tested text column.", - "$ref" : "#/components/schemas/ColumnAcceptedValuesTextFoundInSetPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnAcceptedValuesTextFoundInSetPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnAcceptedValuesTextFoundInSetPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnAcceptedValuesTextFoundInSetPercentSensorParametersSpec" }, "warning" : { "description" : "Default alerting threshold for a percentage of rows with valid values in a column (from a set of expected values). 
Raises a data quality issue at a warning severity level when the percentage of valid rows is below the minimum percentage threshold.", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a percentage of rows with valid values in a column (from a set of expected values). Raises a data quality issue at an error severity level when the percentage of valid rows is below the minimum percentage threshold.", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Default alerting threshold for a percentage of rows with valid values in a column (from a set of expected values). Raises a data quality issue at a fatal severity level when the percentage of valid rows is below the minimum percentage threshold.", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -63525,15 +63525,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -63570,23 +63570,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextLengthAboveMaxLengthCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextLengthAboveMaxLengthCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextLengthAboveMaxLengthCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextLengthAboveMaxLengthCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows containing strings with a length above the length indicated by the user in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -63695,15 +63695,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -63740,23 +63740,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextLengthAboveMaxLengthPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextLengthAboveMaxLengthPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextLengthAboveMaxLengthPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextLengthAboveMaxLengthPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows containing strings with a length above the length indicated by the user in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -63865,15 +63865,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -63910,23 +63910,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextLengthBelowMinLengthCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextLengthBelowMinLengthCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextLengthBelowMinLengthCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextLengthBelowMinLengthCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows containing strings with a length below the length indicated by the user in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -64035,15 +64035,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -64080,23 +64080,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextLengthBelowMinLengthPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextLengthBelowMinLengthPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextLengthBelowMinLengthPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextLengthBelowMinLengthPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with strings with a length below the indicated by the user length in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -64205,15 +64205,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -64250,23 +64250,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextLengthInRangePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextLengthInRangePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextLengthInRangePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextLengthInRangePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with strings with a length in the range indicated by the user in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -64375,15 +64375,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -64420,23 +64420,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextMatchDateFormatPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextMatchDateFormatPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with matching date format in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -64577,15 +64577,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -64622,23 +64622,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextMaxLengthSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextMaxLengthSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextMaxLengthSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextMaxLengthSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for the maximum text length in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" } } } @@ -64747,15 +64747,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -64792,23 +64792,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextMaxWordCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxWordCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxWordCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextMaxWordCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" } } } @@ -64863,8 +64863,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnTextMaxWordCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxWordCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxWordCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextMaxWordCountSensorParametersSpec" } } } @@ -64973,15 +64973,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -65018,23 +65018,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextMeanLengthSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextMeanLengthSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextMeanLengthSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextMeanLengthSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for the mean (average) text length in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -65143,15 +65143,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -65188,23 +65188,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextMinLengthSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextMinLengthSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextMinLengthSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextMinLengthSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum length of string in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" } } } @@ -65313,15 +65313,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -65358,23 +65358,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextMinWordCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextMinWordCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextMinWordCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextMinWordCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenIntsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenIntsRuleParametersSpec" } } } @@ -65429,8 +65429,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnTextMinWordCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextMinWordCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextMinWordCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextMinWordCountSensorParametersSpec" } } } @@ -65514,59 +65514,59 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_text_min_length" : { "description" : "This check finds the length of the shortest text in a column. Then, it verifies that the minimum length is within an accepted range. It detects that the shortest text is too short. 
Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextMinLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMinLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMinLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMinLengthCheckSpec" }, "monthly_text_max_length" : { "description" : "This check finds the length of the longest text in a column. Then, it verifies that the maximum length is within an accepted range. It detects that the texts are too long or not long enough. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextMaxLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMaxLengthCheckSpec" }, "monthly_text_mean_length" : { "description" : "Verifies that the mean (average) length of texts in a column is within an accepted range. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextMeanLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMeanLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMeanLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMeanLengthCheckSpec" }, "monthly_text_length_below_min_length" : { "description" : "The check counts the number of text values in the column that is below the length defined by the user as a parameter. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec" }, "monthly_text_length_below_min_length_percent" : { "description" : "The check measures the percentage of text values in the column that is below the length defined by the user as a parameter. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec" }, "monthly_text_length_above_max_length" : { "description" : "The check counts the number of text values in the column that is above the length defined by the user as a parameter. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec" }, "monthly_text_length_above_max_length_percent" : { "description" : "The check measures the percentage of text values in the column that is above the length defined by the user as a parameter. 
Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec" }, "monthly_text_length_in_range_percent" : { "description" : "The check measures the percentage of those text values with length in the range provided by the user in the column. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec" }, "monthly_min_word_count" : { "description" : "This check finds the lowest word count of text in a column. Then, it verifies that the minimum length is within an accepted range. It detects that the text contains too few words.", - "$ref" : "#/components/schemas/ColumnTextMinWordCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMinWordCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMinWordCountCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMinWordCountCheckSpec" }, "monthly_max_word_count" : { "description" : "This check finds the highest word count of text in a column. Then, it verifies that the maximum length is within an accepted range. It detects that the text contains too many words.", - "$ref" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec" } } } @@ -65650,59 +65650,59 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_text_min_length" : { "description" : "This check finds the length of the shortest text in a column. Then, it verifies that the minimum length is within an accepted range. It detects that the shortest text is too short. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextMinLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMinLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMinLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMinLengthCheckSpec" }, "monthly_partition_text_max_length" : { "description" : "This check finds the length of the longest text in a column. Then, it verifies that the maximum length is within an accepted range. It detects that the texts are too long or not long enough. 
Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextMaxLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMaxLengthCheckSpec" }, "monthly_partition_text_mean_length" : { "description" : "Verifies that the mean (average) length of texts in a column is within an accepted range. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextMeanLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMeanLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMeanLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMeanLengthCheckSpec" }, "monthly_partition_text_length_below_min_length" : { "description" : "The check counts the number of text values in the column that is below the length defined by the user as a parameter. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec" }, "monthly_partition_text_length_below_min_length_percent" : { "description" : "The check measures the percentage of text values in the column that is below the length defined by the user as a parameter. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec" }, "monthly_partition_text_length_above_max_length" : { "description" : "The check counts the number of text values in the column that is above the length defined by the user as a parameter. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec" }, "monthly_partition_text_length_above_max_length_percent" : { "description" : "The check measures the percentage of text values in the column that is above the length defined by the user as a parameter. 
Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec" }, "monthly_partition_text_length_in_range_percent" : { "description" : "The check measures the percentage of those text values with length in the range provided by the user in the column. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec" }, "monthly_partition_min_word_count" : { "description" : "This check finds the lowest word count of text in a column. Then, it verifies that the minimum length is within an accepted range. It detects that the text contains too few words.", - "$ref" : "#/components/schemas/ColumnTextMinWordCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMinWordCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMinWordCountCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMinWordCountCheckSpec" }, "monthly_partition_max_word_count" : { "description" : "This check finds the highest word count of text in a column. Then, it verifies that the maximum length is within an accepted range. It detects that the text contains too many words.", - "$ref" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec" } } } @@ -65811,15 +65811,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -65856,23 +65856,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsTextNotMatchingDatePatternCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsTextNotMatchingDatePatternCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsTextNotMatchingDatePatternCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsTextNotMatchingDatePatternCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with not matching date regex in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -65981,15 +65981,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -66026,23 +66026,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsTextNotMatchingDatePatternPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsTextNotMatchingDatePatternPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsTextNotMatchingDatePatternPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsTextNotMatchingDatePatternPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with matching date regex in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -66151,15 +66151,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -66196,23 +66196,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsTextNotMatchingNamePatternPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsTextNotMatchingNamePatternPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsTextNotMatchingNamePatternPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsTextNotMatchingNamePatternPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with matching name regex in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -66321,15 +66321,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -66366,23 +66366,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsTextNotMatchingRegexCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsTextNotMatchingRegexCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsTextNotMatchingRegexCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsTextNotMatchingRegexCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with not matching regex in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -66491,15 +66491,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -66536,23 +66536,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextParsableToBooleanPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextParsableToBooleanPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextParsableToBooleanPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextParsableToBooleanPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows with a boolean placeholder strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -66661,15 +66661,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -66706,23 +66706,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextParsableToDatePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextParsableToDatePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextParsableToDatePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextParsableToDatePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows with strings parsable to a date in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -66831,15 +66831,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -66876,23 +66876,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextParsableToFloatPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextParsableToFloatPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextParsableToFloatPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextParsableToFloatPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows with a parsable to float strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -67001,15 +67001,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -67046,23 +67046,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextParsableToIntegerPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextParsableToIntegerPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextParsableToIntegerPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextParsableToIntegerPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows with a parsable to integer strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -67146,59 +67146,59 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_text_min_length" : { "description" : "This check finds the length of the shortest text in a column. Then, it verifies that the minimum length is within an accepted range. It detects that the shortest text is too short.", - "$ref" : "#/components/schemas/ColumnTextMinLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMinLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMinLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMinLengthCheckSpec" }, "profile_text_max_length" : { "description" : "This check finds the length of the longest text in a column. Then, it verifies that the maximum length is within an accepted range. 
It detects that the texts are too long or not long enough.", - "$ref" : "#/components/schemas/ColumnTextMaxLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMaxLengthCheckSpec" }, "profile_text_mean_length" : { "description" : "Verifies that the mean (average) length of texts in a column is within an accepted range.", - "$ref" : "#/components/schemas/ColumnTextMeanLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMeanLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMeanLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMeanLengthCheckSpec" }, "profile_text_length_below_min_length" : { "description" : "The check counts the number of text values in the column that are below the length defined by the user as a parameter.", - "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthCheckSpec" }, "profile_text_length_below_min_length_percent" : { "description" : "The check measures the percentage of text values in the column that are below the length defined by the user as a parameter.", - "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthBelowMinLengthPercentCheckSpec" }, "profile_text_length_above_max_length" : { "description" : "The check counts the number of text values in the column that are above the length defined by the user as a parameter.", - "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthCheckSpec" }, "profile_text_length_above_max_length_percent" : { "description" : "The check measures the percentage of text values in the column that are above the length defined by the user as a parameter.", - "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthAboveMaxLengthPercentCheckSpec" }, "profile_text_length_in_range_percent" : { "description" : "The check measures the percentage of text values whose length is within the range provided by the user.", - "$ref" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnTextLengthInRangePercentCheckSpec" }, "profile_min_word_count" : { "description" : "This check finds the lowest word count of text in a column. Then, it verifies that the minimum word count is within an accepted range.
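The length checks just listed (below-min count and percent, above-max count and percent, in-range percent) all reduce to simple length comparisons over the column values. A minimal sketch of those measurements; modeling the column as a plain list of optional strings is an illustration, not how a DQOps sensor reads data:

```python
from typing import Optional

def length_stats(values: list[Optional[str]],
                 min_length: int, max_length: int) -> dict:
    # Only non-null values are measured; this null handling is an assumption.
    texts = [v for v in values if v is not None]
    below = sum(1 for t in texts if len(t) < min_length)
    above = sum(1 for t in texts if len(t) > max_length)
    in_range = len(texts) - below - above
    total = len(texts) or 1  # guard against an empty column
    return {
        "text_length_below_min_length": below,
        "text_length_below_min_length_percent": 100.0 * below / total,
        "text_length_above_max_length": above,
        "text_length_above_max_length_percent": 100.0 * above / total,
        "text_length_in_range_percent": 100.0 * in_range / total,
    }

print(length_stats(["a", "abcd", "abcdefghij", None], min_length=2, max_length=8))
```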
It detects that the text contains too few words.", - "$ref" : "#/components/schemas/ColumnTextMinWordCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMinWordCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMinWordCountCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMinWordCountCheckSpec" }, "profile_max_word_count" : { "description" : "This check finds the highest word count of text in a column. Then, it verifies that the maximum word count is within an accepted range. It detects that the text contains too many words.", - "$ref" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec", + "$ref" : "#/components/schemas/ColumnTextMaxWordCountCheckSpec" } } } @@ -67244,33 +67244,33 @@ "properties" : { "text_max_length" : { "description" : "Configuration of the profiler that finds the maximum text length.", - "$ref" : "#/components/schemas/ColumnTextTextMaxLengthStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnTextTextMaxLengthStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnTextTextMaxLengthStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnTextTextMaxLengthStatisticsCollectorSpec" }, "text_mean_length" : { "description" : "Configuration of the profiler that finds the mean text length.", - "$ref" : "#/components/schemas/ColumnTextTextMeanLengthStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnTextTextMeanLengthStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnTextTextMeanLengthStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnTextTextMeanLengthStatisticsCollectorSpec" }, "text_min_length" : { "description" : "Configuration of the profiler that finds the min text length.", - "$ref" : "#/components/schemas/ColumnTextTextMinLengthStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnTextTextMinLengthStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnTextTextMinLengthStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnTextTextMinLengthStatisticsCollectorSpec" }, "text_datatype_detect" : { "description" : "Configuration of the profiler that detects datatype.", - "$ref" : "#/components/schemas/ColumnTextTextDatatypeDetectStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnTextTextDatatypeDetectStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnTextTextDatatypeDetectStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnTextTextDatatypeDetectStatisticsCollectorSpec" }, "text_min_word_count" : { "description" : "Configuration of the profiler that finds the estimated minimum word count.", - "$ref" : "#/components/schemas/ColumnTextMinWordCountStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnTextMinWordCountStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnTextMinWordCountStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnTextMinWordCountStatisticsCollectorSpec" }, "text_max_word_count" : { "description" : "Configuration of the profiler that finds the estimated maximum word count.", - "$ref" : "#/components/schemas/ColumnTextMaxWordCountStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnTextMaxWordCountStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnTextMaxWordCountStatisticsCollectorSpec", + "$ref" :
"#/components/schemas/ColumnTextMaxWordCountStatisticsCollectorSpec" } } } @@ -67302,8 +67302,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnDatatypeStringDatatypeDetectSensorParametersSpec" } } } @@ -67551,8 +67551,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnTextTextMaxLengthSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextMaxLengthSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextMaxLengthSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextMaxLengthSensorParametersSpec" } } } @@ -67607,8 +67607,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnTextTextMeanLengthSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextMeanLengthSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextMeanLengthSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextMeanLengthSensorParametersSpec" } } } @@ -67663,8 +67663,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnTextTextMinLengthSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextMinLengthSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextMinLengthSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextMinLengthSensorParametersSpec" } } } @@ -67911,15 +67911,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -67956,23 +67956,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextValidCountryCodePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextValidCountryCodePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextValidCountryCodePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextValidCountryCodePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows with valid country code strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -68081,15 +68081,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
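The valid country code check above, and the valid currency code check that follows, both reduce to a set-membership percentage over the column values. A sketch with a deliberately tiny allowed set; a real implementation would draw on the full ISO 3166 / ISO 4217 code lists, which are assumed to come from elsewhere:

```python
def found_in_set_percent(values: list[str | None], allowed: set[str]) -> float:
    texts = [v for v in values if v is not None]
    if not texts:
        return 100.0  # convention for an empty column; an assumption
    hits = sum(1 for t in texts if t.upper() in allowed)
    return 100.0 * hits / len(texts)

COUNTRY_CODES = {"US", "PL", "DE"}  # abridged stand-in for ISO 3166
print(found_in_set_percent(["us", "PL", "XX", None], COUNTRY_CODES))  # 66.66...
```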
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -68126,23 +68126,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnTextTextValidCurrencyCodePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnTextTextValidCurrencyCodePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnTextTextValidCurrencyCodePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnTextTextValidCurrencyCodePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum percentage of rows with valid currency code strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -68251,15 +68251,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -68296,23 +68296,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnPatternsTextsNotMatchingRegexPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnPatternsTextsNotMatchingRegexPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnPatternsTextsNotMatchingRegexPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnPatternsTextsNotMatchingRegexPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with texts not matching a regex in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -68421,15 +68421,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking.
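The texts_not_matching_regex check above inverts the usual direction: the sensor measures how many values fail the pattern, and the MaxPercentRule0 (warning/error) and MaxPercentRule5 (fatal) rules bound that failure rate from above. A sketch using the standard `re` module; the pattern and the null handling are illustrative assumptions:

```python
import re

def not_matching_regex_percent(values: list[str | None], pattern: str) -> float:
    rx = re.compile(pattern)
    texts = [v for v in values if v is not None]
    if not texts:
        return 0.0  # empty-column convention; an assumption
    misses = sum(1 for t in texts if rx.fullmatch(t) is None)
    return 100.0 * misses / len(texts)

percent = not_matching_regex_percent(["a-1", "b-2", "oops"], r"[a-z]-\d")
# MaxPercentRule0 fails on any non-matching row; MaxPercentRule5 tolerates
# up to 5% non-matching rows before raising the fatal severity.
print(percent, "fatal" if percent > 5.0 else "below the fatal threshold")
```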
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -68466,23 +68466,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnBoolTruePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnBoolTruePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnBoolTruePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnBoolTruePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of true values in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenPercentRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenPercentRuleParametersSpec" } } } @@ -68670,79 +68670,79 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_distinct_count" : { "description" : "Verifies that the number of distinct values stays within an accepted range. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDistinctCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountCheckSpec" }, "daily_distinct_percent" : { "description" : "Verifies that the percentage of distinct values in a column does not fall below the minimum accepted percent.
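Unlike the one-sided min-percent and max-percent rules, the true_percent check above uses the same BetweenPercentRuleParametersSpec at every severity level, each presumably configured with its own band. A sketch of a between-band test; the `min_percent`/`max_percent` parameter names follow the rule's name but are an assumption:

```python
def evaluate_between_percent(actual: float,
                             min_percent: float, max_percent: float) -> bool:
    """True when the measured percentage stays inside the accepted band."""
    return min_percent <= actual <= max_percent

# e.g. expect between 40% and 60% of rows to be true in a boolean column
print(evaluate_between_percent(55.0, 40.0, 60.0))  # True, the check passes
print(evaluate_between_percent(72.5, 40.0, 60.0))  # False, raise the issue
```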
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDistinctPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentCheckSpec" }, "daily_duplicate_count" : { "description" : "Verifies that the number of duplicate values in a column does not exceed the maximum accepted count. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDuplicateCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnDuplicateCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnDuplicateCountCheckSpec", + "$ref" : "#/components/schemas/ColumnDuplicateCountCheckSpec" }, "daily_duplicate_percent" : { "description" : "Verifies that the percentage of duplicate values in a column does not exceed the maximum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDuplicatePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDuplicatePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDuplicatePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDuplicatePercentCheckSpec" }, "daily_distinct_count_anomaly" : { "description" : "Verifies that the distinct count in a monitored column is within a two-tailed percentile from measurements made during the last 90 days.", - "$ref" : "#/components/schemas/ColumnDistinctCountAnomalyDifferencingCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountAnomalyDifferencingCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountAnomalyDifferencingCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountAnomalyDifferencingCheckSpec" }, "daily_distinct_percent_anomaly" : { "description" : "Verifies that the distinct percent in a monitored column is within a two-tailed percentile from measurements made during the last 90 days.", - "$ref" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec" }, "daily_distinct_count_change" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec" }, "daily_distinct_percent_change" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec" }, "daily_distinct_count_change_1_day" : { "description" : "Verifies that the distinct count in a monitored column has changed 
by a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec" }, "daily_distinct_count_change_7_days" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout from last week.", - "$ref" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec" }, "daily_distinct_count_change_30_days" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout from last month.", - "$ref" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec" }, "daily_distinct_percent_change_1_day" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec" }, "daily_distinct_percent_change_7_days" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from last week.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec" }, "daily_distinct_percent_change_30_days" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from last month.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec" } } } @@ -68846,79 +68846,79 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_distinct_count" : { "description" : "Verifies that the number of distinct values stays within an accepted range. 
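The *_change, *_change_1_day, *_change_7_days, and *_change_30_days variants listed above differ only in which historical readout the current value is compared against. A sketch of the shared "changed by a fixed rate" test; the 10% default and the zero-reference handling are illustrative assumptions, not DQOps defaults:

```python
def changed_within_rate(current: float, reference: float,
                        max_percent: float = 10.0) -> bool:
    """True when the relative change since the reference readout
    stays within max_percent (in either direction)."""
    if reference == 0:
        return current == 0  # degenerate case; treatment is an assumption
    change = abs(current - reference) / abs(reference) * 100.0
    return change <= max_percent

# distinct_count readouts: yesterday 1000, today 1180 -> 18% change
print(changed_within_rate(1180, 1000))        # False, the check fails
print(changed_within_rate(1180, 1000, 20.0))  # True with a looser threshold
```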
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnDistinctCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountCheckSpec" }, "daily_partition_distinct_percent" : { "description" : "Verifies that the percentage of distinct values in a column does not fall below the minimum accepted percent. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnDistinctPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentCheckSpec" }, "daily_partition_duplicate_count" : { "description" : "Verifies that the number of duplicate values in a column does not exceed the maximum accepted count. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnDuplicateCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnDuplicateCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnDuplicateCountCheckSpec", + "$ref" : "#/components/schemas/ColumnDuplicateCountCheckSpec" }, "daily_partition_duplicate_percent" : { "description" : "Verifies that the percent of duplicate values in a column does not exceed the maximum accepted percent. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnDuplicatePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDuplicatePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDuplicatePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDuplicatePercentCheckSpec" }, "daily_partition_distinct_count_anomaly" : { "description" : "Verifies that the distinct count in a monitored column is within a two-tailed percentile from measurements made during the last 90 days.", - "$ref" : "#/components/schemas/ColumnDistinctCountAnomalyStationaryPartitionCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountAnomalyStationaryPartitionCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountAnomalyStationaryPartitionCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountAnomalyStationaryPartitionCheckSpec" }, "daily_partition_distinct_percent_anomaly" : { "description" : "Verifies that the distinct percent in a monitored column is within a two-tailed percentile from measurements made during the last 90 days.", - "$ref" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec" }, "daily_partition_distinct_count_change" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec" }, "daily_partition_distinct_percent_change" : { "description" : "Verifies that the 
distinct percent in a monitored column has changed by a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec" }, "daily_partition_distinct_count_change_1_day" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec" }, "daily_partition_distinct_count_change_7_days" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout from the last week.", - "$ref" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec" }, "daily_partition_distinct_count_change_30_days" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec" }, "daily_partition_distinct_percent_change_1_day" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec" }, "daily_partition_distinct_percent_change_7_days" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from the last week.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec" }, "daily_partition_distinct_percent_change_30_days" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from the last month.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec" } } } @@ -68973,8 +68973,8 @@ }, "parameters" : { "description" : 
"Profiler parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountSensorParametersSpec" } } } @@ -69029,8 +69029,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentSensorParametersSpec" } } } @@ -69085,8 +69085,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDuplicateCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDuplicateCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDuplicateCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDuplicateCountSensorParametersSpec" } } } @@ -69141,8 +69141,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/ColumnUniquenessDuplicatePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDuplicatePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDuplicatePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDuplicatePercentSensorParametersSpec" } } } @@ -69206,39 +69206,39 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_distinct_count" : { "description" : "Verifies that the number of distinct values stays within an accepted range. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDistinctCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountCheckSpec" }, "monthly_distinct_percent" : { "description" : "Verifies that the percentage of distinct values in a column does not fall below the minimum accepted percent. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDistinctPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentCheckSpec" }, "monthly_duplicate_count" : { "description" : "Verifies that the number of duplicate values in a column does not exceed the maximum accepted count. 
Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDuplicateCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnDuplicateCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnDuplicateCountCheckSpec", + "$ref" : "#/components/schemas/ColumnDuplicateCountCheckSpec" }, "monthly_duplicate_percent" : { "description" : "Verifies that the percentage of duplicate values in a column does not exceed the maximum accepted percentage. Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnDuplicatePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDuplicatePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDuplicatePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDuplicatePercentCheckSpec" }, "monthly_distinct_count_change" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec" }, "monthly_distinct_percent_change" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec" } } } @@ -69302,39 +69302,39 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_distinct_count" : { "description" : "Verifies that the number of distinct values stays within an accepted range. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnDistinctCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountCheckSpec" }, "monthly_partition_distinct_percent" : { "description" : "Verifies that the percentage of distinct values in a column does not fall below the minimum accepted percent. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnDistinctPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentCheckSpec" }, "monthly_partition_duplicate_count" : { "description" : "Verifies that the number of duplicate values in a column does not exceed the maximum accepted count. 
Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnDuplicateCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnDuplicateCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnDuplicateCountCheckSpec", + "$ref" : "#/components/schemas/ColumnDuplicateCountCheckSpec" }, "monthly_partition_duplicate_percent" : { "description" : "Verifies that the percent of duplicate values in a column does not exceed the maximum accepted percent. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnDuplicatePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDuplicatePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDuplicatePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDuplicatePercentCheckSpec" }, "monthly_partition_distinct_count_change" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec" }, "monthly_partition_distinct_percent_change" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec" } } } @@ -69438,79 +69438,79 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
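Several checks in this section (daily_distinct_count_anomaly, daily_partition_distinct_count_anomaly, and the profile variants enumerated next) verify that a new readout "is within a two-tailed percentile from measurements made during the last 90 days". A sketch of that test over a plain list of historical readouts; the interpolation-free percentile and the 1% anomaly mass are simplifying assumptions:

```python
def within_two_tailed_percentile(history: list[float], current: float,
                                 anomaly_percent: float = 1.0) -> bool:
    """True when `current` falls between the lower and upper tail cut-offs
    computed from up to 90 days of historical readouts."""
    data = sorted(history[-90:])
    tail = anomaly_percent / 100.0 / 2.0  # half the anomaly mass per tail
    lo = data[int(tail * (len(data) - 1))]
    hi = data[int((1.0 - tail) * (len(data) - 1))]
    return lo <= current <= hi

history = [100 + (i % 7) for i in range(90)]       # stable weekly pattern
print(within_two_tailed_percentile(history, 104))  # True, inside the band
print(within_two_tailed_percentile(history, 250))  # False -> anomaly
```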
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_distinct_count" : { "description" : "Verifies that the number of distinct values stays within an accepted range.", - "$ref" : "#/components/schemas/ColumnDistinctCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountCheckSpec" }, "profile_distinct_percent" : { "description" : "Verifies that the percentage of distinct values in a column does not fall below the minimum accepted percent.", - "$ref" : "#/components/schemas/ColumnDistinctPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentCheckSpec" }, "profile_duplicate_count" : { "description" : "Verifies that the number of duplicate values in a column does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/ColumnDuplicateCountCheckSpec", - "originalRef" : "#/components/schemas/ColumnDuplicateCountCheckSpec" + "originalRef" : "#/components/schemas/ColumnDuplicateCountCheckSpec", + "$ref" : "#/components/schemas/ColumnDuplicateCountCheckSpec" }, "profile_duplicate_percent" : { "description" : "Verifies that the percentage of duplicate values in a column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnDuplicatePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnDuplicatePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnDuplicatePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnDuplicatePercentCheckSpec" }, "profile_distinct_count_anomaly" : { "description" : "Verifies that the distinct count in a monitored column is within a two-tailed percentile from measurements made during the last 90 days.", - "$ref" : "#/components/schemas/ColumnDistinctCountAnomalyDifferencingCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountAnomalyDifferencingCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountAnomalyDifferencingCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountAnomalyDifferencingCheckSpec" }, "profile_distinct_percent_anomaly" : { "description" : "Verifies that the distinct percent in a monitored column is within a two-tailed percentile from measurements made during the last 90 days.", - "$ref" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentAnomalyStationaryCheckSpec" }, "profile_distinct_count_change" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChangeCheckSpec", + "$ref" : 
"#/components/schemas/ColumnDistinctCountChangeCheckSpec" }, "profile_distinct_percent_change" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChangeCheckSpec" }, "profile_distinct_count_change_1_day" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChange1DayCheckSpec" }, "profile_distinct_count_change_7_days" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout from last week.", - "$ref" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChange7DaysCheckSpec" }, "profile_distinct_count_change_30_days" : { "description" : "Verifies that the distinct count in a monitored column has changed by a fixed rate since the last readout from last month.", - "$ref" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctCountChange30DaysCheckSpec" }, "profile_distinct_percent_change_1_day" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from yesterday.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChange1DayCheckSpec" }, "profile_distinct_percent_change_7_days" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from last week.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec", + "$ref" : "#/components/schemas/ColumnDistinctPercentChange7DaysCheckSpec" }, "profile_distinct_percent_change_30_days" : { "description" : "Verifies that the distinct percent in a monitored column has changed by a fixed rate since the last readout from last month.", - "$ref" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec", + "$ref" : 
"#/components/schemas/ColumnDistinctPercentChange30DaysCheckSpec" } } } @@ -69546,23 +69546,23 @@ "properties" : { "distinct_count" : { "description" : "Configuration of the profiler that counts distinct column values.", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctCountStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctCountStatisticsCollectorSpec" }, "distinct_percent" : { "description" : "Configuration of the profiler that measure the percentage of distinct column values.", - "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDistinctPercentStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDistinctPercentStatisticsCollectorSpec" }, "duplicate_count" : { "description" : "Configuration of the profiler that counts duplicate column values.", - "$ref" : "#/components/schemas/ColumnUniquenessDuplicateCountStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDuplicateCountStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDuplicateCountStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDuplicateCountStatisticsCollectorSpec" }, "duplicate_percent" : { "description" : "Configuration of the profiler that measure the percentage of duplicate column values.", - "$ref" : "#/components/schemas/ColumnUniquenessDuplicatePercentStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/ColumnUniquenessDuplicatePercentStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/ColumnUniquenessDuplicatePercentStatisticsCollectorSpec", + "$ref" : "#/components/schemas/ColumnUniquenessDuplicatePercentStatisticsCollectorSpec" } } } @@ -69671,15 +69671,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -69716,23 +69716,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericValidLatitudePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericValidLatitudePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericValidLatitudePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericValidLatitudePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of rows with valid latitude value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -69841,15 +69841,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
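The valid-latitude check above and the valid-longitude check that follows reduce to range tests over numeric columns. A sketch using the usual WGS-84 bounds of plus/minus 90 and plus/minus 180 degrees; the empty-column convention is an assumption:

```python
def valid_latitude_percent(values: list[float | None]) -> float:
    nums = [v for v in values if v is not None]
    if not nums:
        return 100.0  # empty-column convention; an assumption
    return 100.0 * sum(1 for v in nums if -90.0 <= v <= 90.0) / len(nums)

def valid_longitude_percent(values: list[float | None]) -> float:
    nums = [v for v in values if v is not None]
    if not nums:
        return 100.0
    return 100.0 * sum(1 for v in nums if -180.0 <= v <= 180.0) / len(nums)

print(valid_latitude_percent([52.2, -91.0, 45.0]))     # 66.66...
print(valid_longitude_percent([21.0, 179.9, -200.0]))  # 66.66...
```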
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -69886,23 +69886,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnNumericValidLongitudePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnNumericValidLongitudePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnNumericValidLongitudePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnNumericValidLongitudePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set percentage of rows with valid longitude value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -70022,49 +70022,49 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_empty_text_found" : { "description" : "Detects empty texts (not null, zero-length texts). This check counts empty texts and raises a data quality issue when their count exceeds a *max_count* parameter value. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec" }, "daily_whitespace_text_found" : { "description" : "Detects texts that contain only spaces and other whitespace characters.
It raises a data quality issue when their count exceeds a *max_count* parameter value. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec" }, "daily_null_placeholder_text_found" : { "description" : "Detects texts that are well-known placeholders of null values, such as *None*, *null*, *n/a*. It counts null placeholders and raises a data quality issue when their count exceeds a *max_count* parameter value. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec" }, "daily_empty_text_percent" : { "description" : "Detects empty texts (not null, zero-length texts) and measures their percentage in the column. This check verifies that the rate of empty strings in a column does not exceed the maximum accepted percentage. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec" }, "daily_whitespace_text_percent" : { "description" : "Detects texts that contain only spaces and other whitespace characters and measures their percentage in the column. It raises a data quality issue when their rate exceeds a *max_percent* parameter value. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec" }, "daily_null_placeholder_text_percent" : { "description" : "Detects texts that are well-known placeholders of null values, such as *None*, *null*, *n/a*, and measures their percentage in the column. It raises a data quality issue when their rate exceeds a *max_percent* parameter value. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec" }, "daily_text_surrounded_by_whitespace_found" : { "description" : "Detects text values that are surrounded by whitespace characters on any side. 
This check counts whitespace-surrounded texts and raises a data quality issue when their count exceeds the *max_count* parameter value. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec" }, "daily_text_surrounded_by_whitespace_percent" : { "description" : "This check detects text values that are surrounded by whitespace characters on any side and measures their percentage. This check raises a data quality issue when their percentage exceeds the *max_percent* parameter value. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec" } } } @@ -70138,49 +70138,49 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_empty_text_found" : { "description" : "Detects empty texts (not null, zero-length texts). This check counts empty texts and raises a data quality issue when their count exceeds a *max_count* parameter value. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec" }, "daily_partition_whitespace_text_found" : { "description" : "Detects texts that contain only spaces and other whitespace characters. It raises a data quality issue when their count exceeds a *max_count* parameter value. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec" }, "daily_partition_null_placeholder_text_found" : { "description" : "Detects texts that are well-known placeholders of null values, such as *None*, *null*, *n/a*. It counts null placeholders and raises a data quality issue when their count exceeds a *max_count* parameter value. 
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec" }, "daily_partition_empty_text_percent" : { "description" : "Detects empty texts (not null, zero-length texts) and measures their percentage in the column. This check verifies that the rate of empty strings in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec" }, "daily_partition_whitespace_text_percent" : { "description" : "Detects texts that contain only spaces and other whitespace characters and measures their percentage in the column. It raises a data quality issue when their rate exceeds a *max_percent* parameter value. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec" }, "daily_partition_null_placeholder_text_percent" : { "description" : "Detects texts that are well-known placeholders of null values, such as *None*, *null*, *n/a*, and measures their percentage in the column. It raises a data quality issue when their rate exceeds a *max_percent* parameter value. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec" }, "daily_partition_text_surrounded_by_whitespace_found" : { "description" : "Detects text values that are surrounded by whitespace characters on any side. This check counts whitespace-surrounded texts and raises a data quality issue when their count exceeds the *max_count* parameter value. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec" }, "daily_partition_text_surrounded_by_whitespace_percent" : { "description" : "This check detects text values that are surrounded by whitespace characters on any side and measures their percentage. 
This check raises a data quality issue when their percentage exceeds the *max_percent* parameter value. Analyzes every daily partition and creates a separate data quality check result with the time period value that identifies the daily partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec" } } } @@ -70312,15 +70312,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -70357,23 +70357,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with empty strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -70482,15 +70482,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. 
Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -70527,23 +70527,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with empty strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -70640,49 +70640,49 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_empty_text_found" : { "description" : "Detects empty texts (not null, zero-length texts). This check counts empty texts and raises a data quality issue when their count exceeds a *max_count* parameter value. 
Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec" }, "monthly_whitespace_text_found" : { "description" : "Detects texts that contain only spaces and other whitespace characters. It raises a data quality issue when their count exceeds a *max_count* parameter value. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec" }, "monthly_null_placeholder_text_found" : { "description" : "Detects texts that are well-known placeholders of null values, such as *None*, *null*, *n/a*. It counts null placeholders and raises a data quality issue when their count exceeds a *max_count* parameter value. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec" }, "monthly_empty_text_percent" : { "description" : "Detects empty texts (not null, zero-length texts) and measures their percentage in the column. This check verifies that the rate of empty strings in a column does not exceed the maximum accepted percentage. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec" }, "monthly_whitespace_text_percent" : { "description" : "Detects texts that contain only spaces and other whitespace characters and measures their percentage in the column. It raises a data quality issue when their rate exceeds a *max_percent* parameter value. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec" }, "monthly_null_placeholder_text_percent" : { "description" : "Detects texts that are well-known placeholders of null values, such as *None*, *null*, *n/a*, and measures their percentage in the column. It raises a data quality issue when their rate exceeds a *max_percent* parameter value. 
Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec" }, "monthly_text_surrounded_by_whitespace_found" : { "description" : "Detects text values that are surrounded by whitespace characters on any side. This check counts whitespace-surrounded texts and raises a data quality issue when their count exceeds the *max_count* parameter value. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec" }, "monthly_text_surrounded_by_whitespace_percent" : { "description" : "This check detects text values that are surrounded by whitespace characters on any side and measures their percentage. This check raises a data quality issue when their percentage exceeds the *max_percent* parameter value. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec" } } } @@ -70756,49 +70756,49 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_empty_text_found" : { "description" : "Detects empty texts (not null, zero-length texts). This check counts empty texts and raises a data quality issue when their count exceeds a *max_count* parameter value. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec" }, "monthly_partition_whitespace_text_found" : { "description" : "Detects texts that contain only spaces and other whitespace characters. It raises a data quality issue when their count exceeds a *max_count* parameter value. 
Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec" }, "monthly_partition_null_placeholder_text_found" : { "description" : "Detects texts that are well-known placeholders of null values, such as *None*, *null*, *n/a*. It counts null placeholders and raises a data quality issue when their count exceeds a *max_count* parameter value. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec" }, "monthly_partition_empty_text_percent" : { "description" : "Detects empty texts (not null, zero-length texts) and measures their percentage in the column. This check verifies that the rate of empty strings in a column does not exceed the maximum accepted percentage. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec" }, "monthly_partition_whitespace_text_percent" : { "description" : "Detects texts that contain only spaces and other whitespace characters and measures their percentage in the column. It raises a data quality issue when their rate exceeds a *max_percent* parameter value. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec" }, "monthly_partition_null_placeholder_text_percent" : { "description" : "Detects texts that are well-known placeholders of null values, such as *None*, *null*, *n/a*, and measures their percentage in the column. It raises a data quality issue when their rate exceeds a *max_percent* parameter value. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec" }, "monthly_partition_text_surrounded_by_whitespace_found" : { "description" : "Detects text values that are surrounded by whitespace characters on any side. This check counts whitespace-surrounded texts and raises a data quality issue when their count exceeds the *max_count* parameter value. 
Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec" }, "monthly_partition_text_surrounded_by_whitespace_percent" : { "description" : "This check detects text values that are surrounded by whitespace characters on any side and measures their percentage. This check raises a data quality issue when their percentage exceeds the *max_percent* parameter value. Analyzes every monthly partition and creates a separate data quality check result with the time period value that identifies the monthly partition.", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec" } } } @@ -70907,15 +70907,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -70952,23 +70952,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnWhitespaceBlankNullPlaceholderTextCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceBlankNullPlaceholderTextCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceBlankNullPlaceholderTextCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceBlankNullPlaceholderTextCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with null placeholder strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -71077,15 +71077,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -71122,23 +71122,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnWhitespaceBlankNullPlaceholderTextPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceBlankNullPlaceholderTextPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceBlankNullPlaceholderTextPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceBlankNullPlaceholderTextPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with null placeholder strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -71212,49 +71212,49 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_empty_text_found" : { "description" : "Detects empty texts (not null, zero-length texts). This check counts empty texts and raises a data quality issue when their count exceeds a *max_count* parameter value.", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextFoundCheckSpec" }, "profile_whitespace_text_found" : { "description" : "Detects texts that contain only spaces and other whitespace characters. 
It raises a data quality issue when their count exceeds a *max_count* parameter value.", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextFoundCheckSpec" }, "profile_null_placeholder_text_found" : { "description" : "Detects texts that are well-known placeholders of null values, such as *None*, *null*, *n/a*. It counts null placeholders and raises a data quality issue when their count exceeds a *max_count* parameter value.", - "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextFoundCheckSpec" }, "profile_empty_text_percent" : { "description" : "Detects empty texts (not null, zero-length texts) and measures their percentage in the column. This check verifies that the rate of empty strings in a column does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceEmptyTextPercentCheckSpec" }, "profile_whitespace_text_percent" : { "description" : "Detects texts that contain only spaces and other whitespace characters and measures their percentage in the column. It raises a data quality issue when their rate exceeds a *max_percent* parameter value.", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentCheckSpec" }, "profile_null_placeholder_text_percent" : { "description" : "Detects texts that are well-known placeholders of null values, such as *None*, *null*, *n/a*, and measures their percentage in the column. It raises a data quality issue when their rate exceeds a *max_percent* parameter value.", - "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceNullPlaceholderTextPercentCheckSpec" }, "profile_text_surrounded_by_whitespace_found" : { "description" : "Detects text values that are surrounded by whitespace characters on any side. 
This check counts whitespace-surrounded texts and raises a data quality issue when their count exceeds the *max_count* parameter value.", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceFoundCheckSpec" }, "profile_text_surrounded_by_whitespace_percent" : { "description" : "This check detects text values that are surrounded by whitespace characters on any side and measures their percentage. This check raises a data quality issue when their percentage exceeds the *max_percent* parameter value.", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentCheckSpec" } } } @@ -71386,15 +71386,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -71431,23 +71431,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespaceCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with whitespace-surrounded strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : 
"#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -71556,15 +71556,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -71601,23 +71601,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceTextSurroundedByWhitespacePercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with surrounded by whitespace strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -71772,15 +71772,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. 
Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -71817,23 +71817,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextCountSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextCountSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextCountSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with whitespace strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -71942,15 +71942,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -71987,23 +71987,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentSensorParametersSpec", + "$ref" : "#/components/schemas/ColumnWhitespaceWhitespaceTextPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of rows with whitespace strings in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -72374,8 +72374,8 @@ }, "incident_notification" : { "description" : "Configuration of addresses for new or updated incident notifications.", - "$ref" : "#/components/schemas/IncidentNotificationSpec", - "originalRef" : "#/components/schemas/IncidentNotificationSpec" + "originalRef" : "#/components/schemas/IncidentNotificationSpec", + "$ref" : "#/components/schemas/IncidentNotificationSpec" } } } @@ -72502,6 +72502,11 @@ "extensions" : { }, "exampleSetFlag" : false }, + "teradata" : { + "$ref" : "#/components/schemas/TeradataParametersSpec", + "extensions" : { }, + "exampleSetFlag" : false + }, "run_checks_job_template" : { "$ref" : "#/components/schemas/CheckSearchFilters", "extensions" : { }, @@ -72612,122 +72617,127 @@ "provider_type" : { "type" : "string", "description" : "Database provider type (required). 
Accepts: bigquery, snowflake, etc.", - "enum" : [ "bigquery", "databricks", "mysql", "oracle", "postgresql", "duckdb", "presto", "redshift", "snowflake", "spark", "sqlserver", "trino", "hana", "db2", "mariadb", "clickhouse", "questdb" ] + "enum" : [ "bigquery", "clickhouse", "databricks", "db2", "duckdb", "hana", "mariadb", "mysql", "oracle", "postgresql", "presto", "questdb", "redshift", "snowflake", "spark", "sqlserver", "teradata", "trino" ] }, "bigquery" : { "description" : "BigQuery connection parameters. Specify parameters in the bigquery section.", - "$ref" : "#/components/schemas/BigQueryParametersSpec", - "originalRef" : "#/components/schemas/BigQueryParametersSpec" + "originalRef" : "#/components/schemas/BigQueryParametersSpec", + "$ref" : "#/components/schemas/BigQueryParametersSpec" }, "snowflake" : { "description" : "Snowflake connection parameters.", - "$ref" : "#/components/schemas/SnowflakeParametersSpec", - "originalRef" : "#/components/schemas/SnowflakeParametersSpec" + "originalRef" : "#/components/schemas/SnowflakeParametersSpec", + "$ref" : "#/components/schemas/SnowflakeParametersSpec" }, "postgresql" : { "description" : "PostgreSQL connection parameters.", - "$ref" : "#/components/schemas/PostgresqlParametersSpec", - "originalRef" : "#/components/schemas/PostgresqlParametersSpec" + "originalRef" : "#/components/schemas/PostgresqlParametersSpec", + "$ref" : "#/components/schemas/PostgresqlParametersSpec" }, "duckdb" : { "description" : "DuckDB connection parameters.", - "$ref" : "#/components/schemas/DuckdbParametersSpec", - "originalRef" : "#/components/schemas/DuckdbParametersSpec" + "originalRef" : "#/components/schemas/DuckdbParametersSpec", + "$ref" : "#/components/schemas/DuckdbParametersSpec" }, "redshift" : { "description" : "Redshift connection parameters.", - "$ref" : "#/components/schemas/RedshiftParametersSpec", - "originalRef" : "#/components/schemas/RedshiftParametersSpec" + "originalRef" : "#/components/schemas/RedshiftParametersSpec", + "$ref" : "#/components/schemas/RedshiftParametersSpec" }, "sqlserver" : { "description" : "SqlServer connection parameters.", - "$ref" : "#/components/schemas/SqlServerParametersSpec", - "originalRef" : "#/components/schemas/SqlServerParametersSpec" + "originalRef" : "#/components/schemas/SqlServerParametersSpec", + "$ref" : "#/components/schemas/SqlServerParametersSpec" }, "presto" : { "description" : "Presto connection parameters.", - "$ref" : "#/components/schemas/PrestoParametersSpec", - "originalRef" : "#/components/schemas/PrestoParametersSpec" + "originalRef" : "#/components/schemas/PrestoParametersSpec", + "$ref" : "#/components/schemas/PrestoParametersSpec" }, "trino" : { "description" : "Trino connection parameters.", - "$ref" : "#/components/schemas/TrinoParametersSpec", - "originalRef" : "#/components/schemas/TrinoParametersSpec" + "originalRef" : "#/components/schemas/TrinoParametersSpec", + "$ref" : "#/components/schemas/TrinoParametersSpec" }, "mysql" : { "description" : "MySQL connection parameters.", - "$ref" : "#/components/schemas/MysqlParametersSpec", - "originalRef" : "#/components/schemas/MysqlParametersSpec" + "originalRef" : "#/components/schemas/MysqlParametersSpec", + "$ref" : "#/components/schemas/MysqlParametersSpec" }, "oracle" : { "description" : "Oracle connection parameters.", - "$ref" : "#/components/schemas/OracleParametersSpec", - "originalRef" : "#/components/schemas/OracleParametersSpec" + "originalRef" : "#/components/schemas/OracleParametersSpec", + "$ref" : 
"#/components/schemas/OracleParametersSpec" }, "spark" : { "description" : "Spark connection parameters.", - "$ref" : "#/components/schemas/SparkParametersSpec", - "originalRef" : "#/components/schemas/SparkParametersSpec" + "originalRef" : "#/components/schemas/SparkParametersSpec", + "$ref" : "#/components/schemas/SparkParametersSpec" }, "databricks" : { "description" : "Databricks connection parameters.", - "$ref" : "#/components/schemas/DatabricksParametersSpec", - "originalRef" : "#/components/schemas/DatabricksParametersSpec" + "originalRef" : "#/components/schemas/DatabricksParametersSpec", + "$ref" : "#/components/schemas/DatabricksParametersSpec" }, "hana" : { "description" : "HANA connection parameters.", - "$ref" : "#/components/schemas/HanaParametersSpec", - "originalRef" : "#/components/schemas/HanaParametersSpec" + "originalRef" : "#/components/schemas/HanaParametersSpec", + "$ref" : "#/components/schemas/HanaParametersSpec" }, "db2" : { "description" : "DB2 connection parameters.", - "$ref" : "#/components/schemas/Db2ParametersSpec", - "originalRef" : "#/components/schemas/Db2ParametersSpec" + "originalRef" : "#/components/schemas/Db2ParametersSpec", + "$ref" : "#/components/schemas/Db2ParametersSpec" }, "mariadb" : { "description" : "MariaDB connection parameters.", - "$ref" : "#/components/schemas/MariaDbParametersSpec", - "originalRef" : "#/components/schemas/MariaDbParametersSpec" + "originalRef" : "#/components/schemas/MariaDbParametersSpec", + "$ref" : "#/components/schemas/MariaDbParametersSpec" }, "clickhouse" : { "description" : "ClickHouse connection parameters.", - "$ref" : "#/components/schemas/ClickHouseParametersSpec", - "originalRef" : "#/components/schemas/ClickHouseParametersSpec" + "originalRef" : "#/components/schemas/ClickHouseParametersSpec", + "$ref" : "#/components/schemas/ClickHouseParametersSpec" }, "questdb" : { "description" : "QuestDB connection parameters.", - "$ref" : "#/components/schemas/QuestDbParametersSpec", - "originalRef" : "#/components/schemas/QuestDbParametersSpec" + "originalRef" : "#/components/schemas/QuestDbParametersSpec", + "$ref" : "#/components/schemas/QuestDbParametersSpec" + }, + "teradata" : { + "description" : "Teradata connection parameters.", + "originalRef" : "#/components/schemas/TeradataParametersSpec", + "$ref" : "#/components/schemas/TeradataParametersSpec" }, "run_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run all checks within this connection.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_profiling_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run profiling checks within this connection.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_monitoring_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run monitoring checks within this connection.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + 
"originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_partition_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run partition partitioned checks within this connection.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "collect_statistics_job_template" : { "description" : "Configured parameters for the \"collect statistics\" job that should be pushed to the job queue in order to run all statistics collectors within this connection.", - "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters", - "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters" + "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters", + "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters" }, "data_clean_job_template" : { "description" : "Configured parameters for the \"data clean\" job that after being supplied with a time range should be pushed to the job queue in order to remove stored results connected with this connection.", - "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters", - "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters" + "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters", + "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters" }, "advanced_properties" : { "type" : "object", @@ -72852,6 +72862,11 @@ "extensions" : { }, "exampleSetFlag" : false }, + "teradata" : { + "$ref" : "#/components/schemas/TeradataParametersSpec", + "extensions" : { }, + "exampleSetFlag" : false + }, "parallel_jobs_limit" : { "type" : "integer", "description" : "The concurrency limit for the maximum number of parallel SQL queries executed on this connection.", @@ -72938,92 +72953,97 @@ "provider_type" : { "type" : "string", "description" : "Database provider type (required).", - "enum" : [ "bigquery", "databricks", "mysql", "oracle", "postgresql", "duckdb", "presto", "redshift", "snowflake", "spark", "sqlserver", "trino", "hana", "db2", "mariadb", "clickhouse", "questdb" ] + "enum" : [ "bigquery", "clickhouse", "databricks", "db2", "duckdb", "hana", "mariadb", "mysql", "oracle", "postgresql", "presto", "questdb", "redshift", "snowflake", "spark", "sqlserver", "teradata", "trino" ] }, "bigquery" : { "description" : "BigQuery connection parameters. Specify parameters in the bigquery section.", - "$ref" : "#/components/schemas/BigQueryParametersSpec", - "originalRef" : "#/components/schemas/BigQueryParametersSpec" + "originalRef" : "#/components/schemas/BigQueryParametersSpec", + "$ref" : "#/components/schemas/BigQueryParametersSpec" }, "snowflake" : { "description" : "Snowflake connection parameters. Specify parameters in the snowflake section or set the url (which is the Snowflake JDBC url).", - "$ref" : "#/components/schemas/SnowflakeParametersSpec", - "originalRef" : "#/components/schemas/SnowflakeParametersSpec" + "originalRef" : "#/components/schemas/SnowflakeParametersSpec", + "$ref" : "#/components/schemas/SnowflakeParametersSpec" }, "postgresql" : { "description" : "PostgreSQL connection parameters. 
Specify parameters in the postgresql section or set the url (which is the PostgreSQL JDBC url).", - "$ref" : "#/components/schemas/PostgresqlParametersSpec", - "originalRef" : "#/components/schemas/PostgresqlParametersSpec" + "originalRef" : "#/components/schemas/PostgresqlParametersSpec", + "$ref" : "#/components/schemas/PostgresqlParametersSpec" }, "duckdb" : { "description" : "DuckDB connection parameters. Specify parameters in the duckdb section or set the url (which is the DuckDB JDBC url).", - "$ref" : "#/components/schemas/DuckdbParametersSpec", - "originalRef" : "#/components/schemas/DuckdbParametersSpec" + "originalRef" : "#/components/schemas/DuckdbParametersSpec", + "$ref" : "#/components/schemas/DuckdbParametersSpec" }, "redshift" : { "description" : "Redshift connection parameters. Specify parameters in the redshift section or set the url (which is the Redshift JDBC url).", - "$ref" : "#/components/schemas/RedshiftParametersSpec", - "originalRef" : "#/components/schemas/RedshiftParametersSpec" + "originalRef" : "#/components/schemas/RedshiftParametersSpec", + "$ref" : "#/components/schemas/RedshiftParametersSpec" }, "sqlserver" : { "description" : "SQL Server connection parameters. Specify parameters in the sqlserver section or set the url (which is the SQL Server JDBC url).", - "$ref" : "#/components/schemas/SqlServerParametersSpec", - "originalRef" : "#/components/schemas/SqlServerParametersSpec" + "originalRef" : "#/components/schemas/SqlServerParametersSpec", + "$ref" : "#/components/schemas/SqlServerParametersSpec" }, "presto" : { "description" : "Presto connection parameters. Specify parameters in the presto section or set the url (which is the Presto JDBC url).", - "$ref" : "#/components/schemas/PrestoParametersSpec", - "originalRef" : "#/components/schemas/PrestoParametersSpec" + "originalRef" : "#/components/schemas/PrestoParametersSpec", + "$ref" : "#/components/schemas/PrestoParametersSpec" }, "trino" : { "description" : "Trino connection parameters. Specify parameters in the trino section or set the url (which is the Trino JDBC url).", - "$ref" : "#/components/schemas/TrinoParametersSpec", - "originalRef" : "#/components/schemas/TrinoParametersSpec" + "originalRef" : "#/components/schemas/TrinoParametersSpec", + "$ref" : "#/components/schemas/TrinoParametersSpec" }, "mysql" : { "description" : "MySQL connection parameters. Specify parameters in the mysql section or set the url (which is the MySQL JDBC url).", - "$ref" : "#/components/schemas/MysqlParametersSpec", - "originalRef" : "#/components/schemas/MysqlParametersSpec" + "originalRef" : "#/components/schemas/MysqlParametersSpec", + "$ref" : "#/components/schemas/MysqlParametersSpec" }, "oracle" : { "description" : "Oracle connection parameters. Specify parameters in the oracle section or set the url (which is the Oracle JDBC url).", - "$ref" : "#/components/schemas/OracleParametersSpec", - "originalRef" : "#/components/schemas/OracleParametersSpec" + "originalRef" : "#/components/schemas/OracleParametersSpec", + "$ref" : "#/components/schemas/OracleParametersSpec" }, "spark" : { "description" : "Spark connection parameters. Specify parameters in the spark section or set the url (which is the Spark JDBC url).", - "$ref" : "#/components/schemas/SparkParametersSpec", - "originalRef" : "#/components/schemas/SparkParametersSpec" + "originalRef" : "#/components/schemas/SparkParametersSpec", + "$ref" : "#/components/schemas/SparkParametersSpec" }, "databricks" : { "description" : "Databricks connection parameters. 
Specify parameters in the databricks section or set the url (which is the Databricks JDBC url).", - "$ref" : "#/components/schemas/DatabricksParametersSpec", - "originalRef" : "#/components/schemas/DatabricksParametersSpec" + "originalRef" : "#/components/schemas/DatabricksParametersSpec", + "$ref" : "#/components/schemas/DatabricksParametersSpec" }, "hana" : { "description" : "HANA connection parameters. Specify parameters in the hana section or set the url (which is the HANA JDBC url).", - "$ref" : "#/components/schemas/HanaParametersSpec", - "originalRef" : "#/components/schemas/HanaParametersSpec" + "originalRef" : "#/components/schemas/HanaParametersSpec", + "$ref" : "#/components/schemas/HanaParametersSpec" }, "db2" : { "description" : "DB2 connection parameters. Specify parameters in the db2 section or set the url (which is the DB2 JDBC url).", - "$ref" : "#/components/schemas/Db2ParametersSpec", - "originalRef" : "#/components/schemas/Db2ParametersSpec" + "originalRef" : "#/components/schemas/Db2ParametersSpec", + "$ref" : "#/components/schemas/Db2ParametersSpec" }, "mariadb" : { "description" : "MariaDB connection parameters. Specify parameters in the mariadb section or set the url (which is the MariaDB JDBC url).", - "$ref" : "#/components/schemas/MariaDbParametersSpec", - "originalRef" : "#/components/schemas/MariaDbParametersSpec" + "originalRef" : "#/components/schemas/MariaDbParametersSpec", + "$ref" : "#/components/schemas/MariaDbParametersSpec" }, "clickhouse" : { "description" : "ClickHouse connection parameters. Specify parameters in the clickhouse section or set the url (which is the ClickHouse JDBC url).", - "$ref" : "#/components/schemas/ClickHouseParametersSpec", - "originalRef" : "#/components/schemas/ClickHouseParametersSpec" + "originalRef" : "#/components/schemas/ClickHouseParametersSpec", + "$ref" : "#/components/schemas/ClickHouseParametersSpec" }, "questdb" : { "description" : "QuestDB connection parameters. Specify parameters in the questdb section or set the url (which is the QuestDB JDBC url).", - "$ref" : "#/components/schemas/QuestDbParametersSpec", - "originalRef" : "#/components/schemas/QuestDbParametersSpec" + "originalRef" : "#/components/schemas/QuestDbParametersSpec", + "$ref" : "#/components/schemas/QuestDbParametersSpec" + }, + "teradata" : { + "description" : "Teradata connection parameters. Specify parameters in the teradata section or set the url (which is the Teradata JDBC url).", + "originalRef" : "#/components/schemas/TeradataParametersSpec", + "$ref" : "#/components/schemas/TeradataParametersSpec" }, "parallel_jobs_limit" : { "type" : "integer", @@ -73032,18 +73052,18 @@ }, "default_grouping_configuration" : { "description" : "Default data grouping configuration for all tables. The configuration may be overridden on table, column and check level. Data groupings are configured in two cases: (1) the data in the table should be analyzed with a GROUP BY condition, to analyze different datasets using separate time series, for example a table contains data from multiple countries and there is a 'country' column used for partitioning. a static dimension is assigned to a table, when the data is partitioned at a table level (similar tables store the same information, but for different countries, etc.). (2) a static dimension is assigned to a table, when the data is partitioned at a table level (similar tables store the same information, but for different countries, etc.). 
", - "$ref" : "#/components/schemas/DataGroupingConfigurationSpec", - "originalRef" : "#/components/schemas/DataGroupingConfigurationSpec" + "originalRef" : "#/components/schemas/DataGroupingConfigurationSpec", + "$ref" : "#/components/schemas/DataGroupingConfigurationSpec" }, "schedules" : { "description" : "Configuration of the job scheduler that runs data quality checks. The scheduler configuration is divided into types of checks that have different schedules.", - "$ref" : "#/components/schemas/CronSchedulesSpec", - "originalRef" : "#/components/schemas/CronSchedulesSpec" + "originalRef" : "#/components/schemas/CronSchedulesSpec", + "$ref" : "#/components/schemas/CronSchedulesSpec" }, "auto_import_tables" : { "description" : "Configuration of CRON schedule used to automatically import new tables in regular intervals.", - "$ref" : "#/components/schemas/AutoImportTablesSpec", - "originalRef" : "#/components/schemas/AutoImportTablesSpec" + "originalRef" : "#/components/schemas/AutoImportTablesSpec", + "$ref" : "#/components/schemas/AutoImportTablesSpec" }, "schedule_on_instance" : { "type" : "string", @@ -73051,15 +73071,15 @@ }, "incident_grouping" : { "description" : "Configuration of data quality incident grouping. Configures how failed data quality checks are grouped into data quality incidents.", - "$ref" : "#/components/schemas/ConnectionIncidentGroupingSpec", - "originalRef" : "#/components/schemas/ConnectionIncidentGroupingSpec" + "originalRef" : "#/components/schemas/ConnectionIncidentGroupingSpec", + "$ref" : "#/components/schemas/ConnectionIncidentGroupingSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "labels" : { @@ -73134,8 +73154,8 @@ }, "spec" : { "description" : "Full connection specification, including all nested objects (but not a list of tables).", - "$ref" : "#/components/schemas/ConnectionSpec", - "originalRef" : "#/components/schemas/ConnectionSpec" + "originalRef" : "#/components/schemas/ConnectionSpec", + "$ref" : "#/components/schemas/ConnectionSpec" }, "can_edit" : { "type" : "boolean", @@ -73291,28 +73311,28 @@ "properties" : { "profiling" : { "description" : "Schedule for running profiling data quality checks.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "monitoring_daily" : { "description" : "Schedule for running daily monitoring checks.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "monitoring_monthly" : { "description" : "Schedule for running monthly monitoring checks.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "partitioned_daily" : { "description" : "Schedule for running daily 
partitioned checks.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "partitioned_monthly" : { "description" : "Schedule for running monthly partitioned checks.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" } } } @@ -73645,15 +73665,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -73698,23 +73718,23 @@ }, "parameters" : { "description" : "Custom sensor parameters", - "$ref" : "#/components/schemas/CustomSensorParametersSpec", - "originalRef" : "#/components/schemas/CustomSensorParametersSpec" + "originalRef" : "#/components/schemas/CustomSensorParametersSpec", + "$ref" : "#/components/schemas/CustomSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/CustomRuleParametersSpec", - "originalRef" : "#/components/schemas/CustomRuleParametersSpec" + "originalRef" : "#/components/schemas/CustomRuleParametersSpec", + "$ref" : "#/components/schemas/CustomRuleParametersSpec" }, "error" : { "description" : "Default alerting thresholdthat raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/CustomRuleParametersSpec", - "originalRef" : "#/components/schemas/CustomRuleParametersSpec" + "originalRef" : "#/components/schemas/CustomRuleParametersSpec", + "$ref" : "#/components/schemas/CustomRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/CustomRuleParametersSpec", - "originalRef" : "#/components/schemas/CustomRuleParametersSpec" + "originalRef" : "#/components/schemas/CustomRuleParametersSpec", + "$ref" : "#/components/schemas/CustomRuleParametersSpec" } } } @@ -73922,16 +73942,16 @@ "type" : "array", "description" : "List of data quality dashboard at this level.", "items" : { - "$ref" : "#/components/schemas/DashboardSpec", - "originalRef" : "#/components/schemas/DashboardSpec" + "originalRef" : "#/components/schemas/DashboardSpec", + "$ref" : "#/components/schemas/DashboardSpec" } }, "folders" : { "type" : "array", "description" : "List of data quality dashboard folders at this level.", "items" : { - "$ref" : "#/components/schemas/DashboardsFolderSpec", - "originalRef" : 
"#/components/schemas/DashboardsFolderSpec" + "originalRef" : "#/components/schemas/DashboardsFolderSpec", + "$ref" : "#/components/schemas/DashboardsFolderSpec" } }, "hide_folder" : { @@ -74215,8 +74235,8 @@ }, "spec" : { "description" : "Data grouping specification with the definition of the list of data grouping dimensions, the column names to use in a **GROUP BY** clause or a value of a static tag to assign to every check result captured from the table.", - "$ref" : "#/components/schemas/DataGroupingConfigurationSpec", - "originalRef" : "#/components/schemas/DataGroupingConfigurationSpec" + "originalRef" : "#/components/schemas/DataGroupingConfigurationSpec", + "$ref" : "#/components/schemas/DataGroupingConfigurationSpec" }, "can_edit" : { "type" : "boolean", @@ -74286,48 +74306,48 @@ "properties" : { "level_1" : { "description" : "Data grouping dimension level 1 configuration.", - "$ref" : "#/components/schemas/DataGroupingDimensionSpec", - "originalRef" : "#/components/schemas/DataGroupingDimensionSpec" + "originalRef" : "#/components/schemas/DataGroupingDimensionSpec", + "$ref" : "#/components/schemas/DataGroupingDimensionSpec" }, "level_2" : { "description" : "Data grouping dimension level 2 configuration.", - "$ref" : "#/components/schemas/DataGroupingDimensionSpec", - "originalRef" : "#/components/schemas/DataGroupingDimensionSpec" + "originalRef" : "#/components/schemas/DataGroupingDimensionSpec", + "$ref" : "#/components/schemas/DataGroupingDimensionSpec" }, "level_3" : { "description" : "Data grouping dimension level 3 configuration.", - "$ref" : "#/components/schemas/DataGroupingDimensionSpec", - "originalRef" : "#/components/schemas/DataGroupingDimensionSpec" + "originalRef" : "#/components/schemas/DataGroupingDimensionSpec", + "$ref" : "#/components/schemas/DataGroupingDimensionSpec" }, "level_4" : { "description" : "Data grouping dimension level 4 configuration.", - "$ref" : "#/components/schemas/DataGroupingDimensionSpec", - "originalRef" : "#/components/schemas/DataGroupingDimensionSpec" + "originalRef" : "#/components/schemas/DataGroupingDimensionSpec", + "$ref" : "#/components/schemas/DataGroupingDimensionSpec" }, "level_5" : { "description" : "Data grouping dimension level 5 configuration.", - "$ref" : "#/components/schemas/DataGroupingDimensionSpec", - "originalRef" : "#/components/schemas/DataGroupingDimensionSpec" + "originalRef" : "#/components/schemas/DataGroupingDimensionSpec", + "$ref" : "#/components/schemas/DataGroupingDimensionSpec" }, "level_6" : { "description" : "Data grouping dimension level 6 configuration.", - "$ref" : "#/components/schemas/DataGroupingDimensionSpec", - "originalRef" : "#/components/schemas/DataGroupingDimensionSpec" + "originalRef" : "#/components/schemas/DataGroupingDimensionSpec", + "$ref" : "#/components/schemas/DataGroupingDimensionSpec" }, "level_7" : { "description" : "Data grouping dimension level 7 configuration.", - "$ref" : "#/components/schemas/DataGroupingDimensionSpec", - "originalRef" : "#/components/schemas/DataGroupingDimensionSpec" + "originalRef" : "#/components/schemas/DataGroupingDimensionSpec", + "$ref" : "#/components/schemas/DataGroupingDimensionSpec" }, "level_8" : { "description" : "Data grouping dimension level 8 configuration.", - "$ref" : "#/components/schemas/DataGroupingDimensionSpec", - "originalRef" : "#/components/schemas/DataGroupingDimensionSpec" + "originalRef" : "#/components/schemas/DataGroupingDimensionSpec", + "$ref" : "#/components/schemas/DataGroupingDimensionSpec" }, "level_9" : { 
"description" : "Data grouping dimension level 9 configuration.", - "$ref" : "#/components/schemas/DataGroupingDimensionSpec", - "originalRef" : "#/components/schemas/DataGroupingDimensionSpec" + "originalRef" : "#/components/schemas/DataGroupingDimensionSpec", + "$ref" : "#/components/schemas/DataGroupingDimensionSpec" } } } @@ -74374,8 +74394,8 @@ }, "spec" : { "description" : "Data grouping configuration specification.", - "$ref" : "#/components/schemas/DataGroupingConfigurationSpec", - "originalRef" : "#/components/schemas/DataGroupingConfigurationSpec" + "originalRef" : "#/components/schemas/DataGroupingConfigurationSpec", + "$ref" : "#/components/schemas/DataGroupingConfigurationSpec" }, "can_edit" : { "type" : "boolean", @@ -74991,13 +75011,13 @@ "properties" : { "jobId" : { "description" : "Job id that identifies a job that was started on the DQOps job queue.", - "$ref" : "#/components/schemas/DqoQueueJobId", - "originalRef" : "#/components/schemas/DqoQueueJobId" + "originalRef" : "#/components/schemas/DqoQueueJobId", + "$ref" : "#/components/schemas/DqoQueueJobId" }, "result" : { "description" : "Optional result object that is returned only when the wait parameter was true and the \"delete stored data\" job has finished. Contains a list of partitions that were deleted or updated.", - "$ref" : "#/components/schemas/DeleteStoredDataResult", - "originalRef" : "#/components/schemas/DeleteStoredDataResult" + "originalRef" : "#/components/schemas/DeleteStoredDataResult", + "$ref" : "#/components/schemas/DeleteStoredDataResult" }, "status" : { "type" : "string", @@ -75037,8 +75057,8 @@ "type" : "object", "description" : "Dictionary of partitions that where deleted or updated when the rows were deleted.", "additionalProperties" : { - "$ref" : "#/definitions/DataDeleteResultPartition", - "originalRef" : "#/definitions/DataDeleteResultPartition" + "originalRef" : "#/definitions/DataDeleteResultPartition", + "$ref" : "#/definitions/DataDeleteResultPartition" } } } @@ -75266,8 +75286,8 @@ }, "physical_table_name" : { "description" : "Full table name, including the schema and the table names.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "compact_key" : { "type" : "string", @@ -75325,16 +75345,16 @@ "enum" : [ "queued", "running", "waiting", "finished", "failed", "cancel_requested", "cancelled" ] }, "jobId" : { - "$ref" : "#/components/schemas/DqoQueueJobId", - "originalRef" : "#/components/schemas/DqoQueueJobId" + "originalRef" : "#/components/schemas/DqoQueueJobId", + "$ref" : "#/components/schemas/DqoQueueJobId" }, "changeSequence" : { "type" : "integer", "format" : "int64" }, "updatedModel" : { - "$ref" : "#/components/schemas/DqoJobHistoryEntryModel", - "originalRef" : "#/components/schemas/DqoJobHistoryEntryModel" + "originalRef" : "#/components/schemas/DqoJobHistoryEntryModel", + "$ref" : "#/components/schemas/DqoJobHistoryEntryModel" }, "statusChangedAt" : { "type" : "integer", @@ -75432,78 +75452,78 @@ "properties" : { "synchronizeRootFolderParameters" : { "description" : "The job parameters for the \"synchronize folder\" queue job.", - "$ref" : "#/components/schemas/SynchronizeRootFolderDqoQueueJobParameters", - "originalRef" : "#/components/schemas/SynchronizeRootFolderDqoQueueJobParameters" + "originalRef" : "#/components/schemas/SynchronizeRootFolderDqoQueueJobParameters", + "$ref" : 
"#/components/schemas/SynchronizeRootFolderDqoQueueJobParameters" }, "synchronizeMultipleFoldersParameters" : { "description" : "The job parameters for the \"synchronize multiple folders\" queue job.", - "$ref" : "#/components/schemas/SynchronizeMultipleFoldersDqoQueueJobParameters", - "originalRef" : "#/components/schemas/SynchronizeMultipleFoldersDqoQueueJobParameters" + "originalRef" : "#/components/schemas/SynchronizeMultipleFoldersDqoQueueJobParameters", + "$ref" : "#/components/schemas/SynchronizeMultipleFoldersDqoQueueJobParameters" }, "runScheduledChecksParameters" : { "description" : "The job parameters for the \"run scheduled checks\" cron queue job.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "collectScheduledStatisticsParameters" : { "description" : "The job parameters for the \"collect scheduled statistics\" cron queue job.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "autoImportTablesParameters" : { "description" : "The job parameters for the \"auto import tables\" cron queue job.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "runChecksParameters" : { "description" : "The job parameters for the \"run checks\" queue job.", - "$ref" : "#/components/schemas/RunChecksParameters", - "originalRef" : "#/components/schemas/RunChecksParameters" + "originalRef" : "#/components/schemas/RunChecksParameters", + "$ref" : "#/components/schemas/RunChecksParameters" }, "runChecksOnTableParameters" : { "description" : "The job parameters for the \"run checks on table\" queue job.", - "$ref" : "#/components/schemas/RunChecksOnTableParameters", - "originalRef" : "#/components/schemas/RunChecksOnTableParameters" + "originalRef" : "#/components/schemas/RunChecksOnTableParameters", + "$ref" : "#/components/schemas/RunChecksOnTableParameters" }, "collectStatisticsParameters" : { "description" : "The job parameters for the \"collect statistics\" queue job.", - "$ref" : "#/components/schemas/CollectStatisticsQueueJobParameters", - "originalRef" : "#/components/schemas/CollectStatisticsQueueJobParameters" + "originalRef" : "#/components/schemas/CollectStatisticsQueueJobParameters", + "$ref" : "#/components/schemas/CollectStatisticsQueueJobParameters" }, "collectStatisticsOnTableParameters" : { "description" : "The job parameters for the \"collect statistics on table\" queue job.", - "$ref" : "#/components/schemas/CollectStatisticsOnTableQueueJobParameters", - "originalRef" : "#/components/schemas/CollectStatisticsOnTableQueueJobParameters" + "originalRef" : "#/components/schemas/CollectStatisticsOnTableQueueJobParameters", + "$ref" : "#/components/schemas/CollectStatisticsOnTableQueueJobParameters" }, "collectErrorSamplesParameters" : { "description" : "The job parameters for the \"collect error samples\" queue job.", - "$ref" : "#/components/schemas/CollectErrorSamplesParameters", - "originalRef" : "#/components/schemas/CollectErrorSamplesParameters" + "originalRef" : "#/components/schemas/CollectErrorSamplesParameters", + "$ref" : 
"#/components/schemas/CollectErrorSamplesParameters" }, "collectErrorSamplesOnTableParameters" : { "description" : "The job parameters for the \"collect error samples on table\" queue job.", - "$ref" : "#/components/schemas/CollectErrorSamplesOnTableParameters", - "originalRef" : "#/components/schemas/CollectErrorSamplesOnTableParameters" + "originalRef" : "#/components/schemas/CollectErrorSamplesOnTableParameters", + "$ref" : "#/components/schemas/CollectErrorSamplesOnTableParameters" }, "importSchemaParameters" : { "description" : "The job parameters for the \"collect schema\" queue job.", - "$ref" : "#/components/schemas/ImportSchemaQueueJobParameters", - "originalRef" : "#/components/schemas/ImportSchemaQueueJobParameters" + "originalRef" : "#/components/schemas/ImportSchemaQueueJobParameters", + "$ref" : "#/components/schemas/ImportSchemaQueueJobParameters" }, "importTableParameters" : { "description" : "The job parameters for the \"collect tables\" queue job.", - "$ref" : "#/components/schemas/ImportTablesQueueJobParameters", - "originalRef" : "#/components/schemas/ImportTablesQueueJobParameters" + "originalRef" : "#/components/schemas/ImportTablesQueueJobParameters", + "$ref" : "#/components/schemas/ImportTablesQueueJobParameters" }, "deleteStoredDataParameters" : { "description" : "The job parameters for the \"delete stored data\" queue job.", - "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters", - "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters" + "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters", + "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters" }, "repairStoredDataParameters" : { "description" : "The job parameters for the \"repair stored data\" queue job.", - "$ref" : "#/components/schemas/RepairStoredDataQueueJobParameters", - "originalRef" : "#/components/schemas/RepairStoredDataQueueJobParameters" + "originalRef" : "#/components/schemas/RepairStoredDataQueueJobParameters", + "$ref" : "#/components/schemas/RepairStoredDataQueueJobParameters" } } } @@ -75555,16 +75575,16 @@ "type" : "object", "properties" : { "jobId" : { - "$ref" : "#/components/schemas/DqoQueueJobId", - "originalRef" : "#/components/schemas/DqoQueueJobId" + "originalRef" : "#/components/schemas/DqoQueueJobId", + "$ref" : "#/components/schemas/DqoQueueJobId" }, "jobType" : { "type" : "string", "enum" : [ "run_checks", "run_checks_on_table", "collect_statistics", "collect_scheduled_statistics", "collect_statistics_on_table", "collect_error_samples", "collect_error_samples_on_table", "queue_thread_shutdown", "synchronize_folder", "synchronize_multiple_folders", "run_scheduled_checks_cron", "import_schema", "import_tables", "auto_import_tables", "delete_stored_data", "repair_stored_data" ] }, "parameters" : { - "$ref" : "#/components/schemas/DqoJobEntryParametersModel", - "originalRef" : "#/components/schemas/DqoJobEntryParametersModel" + "originalRef" : "#/components/schemas/DqoJobEntryParametersModel", + "$ref" : "#/components/schemas/DqoJobEntryParametersModel" }, "status" : { "type" : "string", @@ -75618,13 +75638,13 @@ "jobChanges" : { "type" : "array", "items" : { - "$ref" : "#/components/schemas/DqoJobChangeModel", - "originalRef" : "#/components/schemas/DqoJobChangeModel" + "originalRef" : "#/components/schemas/DqoJobChangeModel", + "$ref" : "#/components/schemas/DqoJobChangeModel" } }, "folderSynchronizationStatus" : { - "$ref" : "#/components/schemas/CloudSynchronizationFoldersStatusModel", - "originalRef" : 
"#/components/schemas/CloudSynchronizationFoldersStatusModel" + "originalRef" : "#/components/schemas/CloudSynchronizationFoldersStatusModel", + "$ref" : "#/components/schemas/CloudSynchronizationFoldersStatusModel" }, "lastSequenceNumber" : { "type" : "integer", @@ -75668,13 +75688,13 @@ "jobs" : { "type" : "array", "items" : { - "$ref" : "#/components/schemas/DqoJobHistoryEntryModel", - "originalRef" : "#/components/schemas/DqoJobHistoryEntryModel" + "originalRef" : "#/components/schemas/DqoJobHistoryEntryModel", + "$ref" : "#/components/schemas/DqoJobHistoryEntryModel" } }, "folderSynchronizationStatus" : { - "$ref" : "#/components/schemas/CloudSynchronizationFoldersStatusModel", - "originalRef" : "#/components/schemas/CloudSynchronizationFoldersStatusModel" + "originalRef" : "#/components/schemas/CloudSynchronizationFoldersStatusModel", + "$ref" : "#/components/schemas/CloudSynchronizationFoldersStatusModel" }, "lastSequenceNumber" : { "type" : "integer", @@ -75732,8 +75752,8 @@ }, "parentJobId" : { "description" : "Parent job id. Filled only for nested jobs, for example a sub-job that runs data quality checks on a single table.", - "$ref" : "#/components/schemas/DqoQueueJobId", - "originalRef" : "#/components/schemas/DqoQueueJobId" + "originalRef" : "#/components/schemas/DqoQueueJobId", + "$ref" : "#/components/schemas/DqoQueueJobId" }, "createdAt" : { "type" : "integer", @@ -76011,6 +76031,13 @@ "extensions" : { }, "exampleSetFlag" : false, "types" : [ "boolean" ] + }, + "can_use_ai_anomaly_detection" : { + "type" : "boolean", + "description" : "The DQOps instance is a paid version with advanced AI anomaly prediction.", + "extensions" : { }, + "exampleSetFlag" : false, + "types" : [ "boolean" ] } }, "description" : "The model that describes the current user and his access rights.", @@ -76150,6 +76177,10 @@ "can_synchronize_to_data_catalog" : { "type" : "boolean", "description" : "User can synchronize data to a data catalog. The instance must be configured correctly and the user must have at least an EDITOR role." + }, + "can_use_ai_anomaly_detection" : { + "type" : "boolean", + "description" : "The DQOps instance is a paid version with advanced AI anomaly prediction." } }, "description" : "The model that describes the current user and his access rights." 
@@ -76379,28 +76410,28 @@ }, "csv" : { "description" : "Csv file format specification.", - "$ref" : "#/components/schemas/CsvFileFormatSpec", - "originalRef" : "#/components/schemas/CsvFileFormatSpec" + "originalRef" : "#/components/schemas/CsvFileFormatSpec", + "$ref" : "#/components/schemas/CsvFileFormatSpec" }, "json" : { "description" : "Json file format specification.", - "$ref" : "#/components/schemas/JsonFileFormatSpec", - "originalRef" : "#/components/schemas/JsonFileFormatSpec" + "originalRef" : "#/components/schemas/JsonFileFormatSpec", + "$ref" : "#/components/schemas/JsonFileFormatSpec" }, "parquet" : { "description" : "Parquet file format specification.", - "$ref" : "#/components/schemas/ParquetFileFormatSpec", - "originalRef" : "#/components/schemas/ParquetFileFormatSpec" + "originalRef" : "#/components/schemas/ParquetFileFormatSpec", + "$ref" : "#/components/schemas/ParquetFileFormatSpec" }, "iceberg" : { "description" : "Iceberg file format specification.", - "$ref" : "#/components/schemas/IcebergFileFormatSpec", - "originalRef" : "#/components/schemas/IcebergFileFormatSpec" + "originalRef" : "#/components/schemas/IcebergFileFormatSpec", + "$ref" : "#/components/schemas/IcebergFileFormatSpec" }, "delta_lake" : { "description" : "Delta Lake file format specification.", - "$ref" : "#/components/schemas/DeltaLakeFileFormatSpec", - "originalRef" : "#/components/schemas/DeltaLakeFileFormatSpec" + "originalRef" : "#/components/schemas/DeltaLakeFileFormatSpec", + "$ref" : "#/components/schemas/DeltaLakeFileFormatSpec" }, "directories" : { "type" : "object", @@ -76518,8 +76549,8 @@ "units" : { "type" : "array", "items" : { - "$ref" : "#/components/schemas/TemporalUnit", - "originalRef" : "#/components/schemas/TemporalUnit" + "originalRef" : "#/components/schemas/TemporalUnit", + "$ref" : "#/components/schemas/TemporalUnit" } } } @@ -76591,8 +76622,8 @@ }, "time_until_execution" : { "description" : "Field value for the time left until the execution of scheduled checks.", - "$ref" : "#/components/schemas/Duration", - "originalRef" : "#/components/schemas/Duration" + "originalRef" : "#/components/schemas/Duration", + "$ref" : "#/components/schemas/Duration" }, "disabled" : { "type" : "boolean", @@ -77273,8 +77304,8 @@ "type" : "array", "description" : "Error samples entries", "items" : { - "$ref" : "#/components/schemas/ErrorSampleEntryModel", - "originalRef" : "#/components/schemas/ErrorSampleEntryModel" + "originalRef" : "#/components/schemas/ErrorSampleEntryModel", + "$ref" : "#/components/schemas/ErrorSampleEntryModel" } } } @@ -77391,8 +77422,8 @@ "type" : "array", "description" : "Error entries", "items" : { - "$ref" : "#/components/schemas/ErrorEntryModel", - "originalRef" : "#/components/schemas/ErrorEntryModel" + "originalRef" : "#/components/schemas/ErrorEntryModel", + "$ref" : "#/components/schemas/ErrorEntryModel" } } } @@ -77553,8 +77584,8 @@ "properties" : { "definition" : { "description" : "Field name that matches the field name (snake_case) used in the YAML specification.", - "$ref" : "#/components/schemas/ParameterDefinitionSpec", - "originalRef" : "#/components/schemas/ParameterDefinitionSpec" + "originalRef" : "#/components/schemas/ParameterDefinitionSpec", + "$ref" : "#/components/schemas/ParameterDefinitionSpec" }, "optional" : { "type" : "boolean", @@ -77669,28 +77700,28 @@ "properties" : { "csv" : { "description" : "Csv file format specification.", - "$ref" : "#/components/schemas/CsvFileFormatSpec", - "originalRef" : "#/components/schemas/CsvFileFormatSpec" + 
"originalRef" : "#/components/schemas/CsvFileFormatSpec", + "$ref" : "#/components/schemas/CsvFileFormatSpec" }, "json" : { "description" : "Json file format specification.", - "$ref" : "#/components/schemas/JsonFileFormatSpec", - "originalRef" : "#/components/schemas/JsonFileFormatSpec" + "originalRef" : "#/components/schemas/JsonFileFormatSpec", + "$ref" : "#/components/schemas/JsonFileFormatSpec" }, "parquet" : { "description" : "Parquet file format specification.", - "$ref" : "#/components/schemas/ParquetFileFormatSpec", - "originalRef" : "#/components/schemas/ParquetFileFormatSpec" + "originalRef" : "#/components/schemas/ParquetFileFormatSpec", + "$ref" : "#/components/schemas/ParquetFileFormatSpec" }, "iceberg" : { "description" : "Iceberg file format specification.", - "$ref" : "#/components/schemas/IcebergFileFormatSpec", - "originalRef" : "#/components/schemas/IcebergFileFormatSpec" + "originalRef" : "#/components/schemas/IcebergFileFormatSpec", + "$ref" : "#/components/schemas/IcebergFileFormatSpec" }, "delta_lake" : { "description" : "Delta Lake file format specification.", - "$ref" : "#/components/schemas/DeltaLakeFileFormatSpec", - "originalRef" : "#/components/schemas/DeltaLakeFileFormatSpec" + "originalRef" : "#/components/schemas/DeltaLakeFileFormatSpec", + "$ref" : "#/components/schemas/DeltaLakeFileFormatSpec" }, "file_paths" : { "type" : "array", @@ -77771,13 +77802,13 @@ }, "filter" : { "description" : "Notification filter specification for filtering the incident by the values of its fields.", - "$ref" : "#/components/schemas/NotificationFilterSpec", - "originalRef" : "#/components/schemas/NotificationFilterSpec" + "originalRef" : "#/components/schemas/NotificationFilterSpec", + "$ref" : "#/components/schemas/NotificationFilterSpec" }, "target" : { "description" : "Notification target addresses for each of the status.", - "$ref" : "#/components/schemas/IncidentNotificationTargetSpec", - "originalRef" : "#/components/schemas/IncidentNotificationTargetSpec" + "originalRef" : "#/components/schemas/IncidentNotificationTargetSpec", + "$ref" : "#/components/schemas/IncidentNotificationTargetSpec" }, "priority" : { "type" : "integer", @@ -77861,13 +77892,13 @@ "properties" : { "filter" : { "description" : "Notification filter specification for filtering the incident by the values of its fields.", - "$ref" : "#/components/schemas/NotificationFilterSpec", - "originalRef" : "#/components/schemas/NotificationFilterSpec" + "originalRef" : "#/components/schemas/NotificationFilterSpec", + "$ref" : "#/components/schemas/NotificationFilterSpec" }, "target" : { "description" : "Notification target addresses for each of the status.", - "$ref" : "#/components/schemas/IncidentNotificationTargetSpec", - "originalRef" : "#/components/schemas/IncidentNotificationTargetSpec" + "originalRef" : "#/components/schemas/IncidentNotificationTargetSpec", + "$ref" : "#/components/schemas/IncidentNotificationTargetSpec" }, "priority" : { "type" : "integer", @@ -78273,13 +78304,13 @@ "properties" : { "jobId" : { "description" : "Job id that identifies a job that was started on the DQOps job queue.", - "$ref" : "#/components/schemas/DqoQueueJobId", - "originalRef" : "#/components/schemas/DqoQueueJobId" + "originalRef" : "#/components/schemas/DqoQueueJobId", + "$ref" : "#/components/schemas/DqoQueueJobId" }, "result" : { "description" : "Optional result object that is returned only when the wait parameter was true and the \"import tables\" job has finished. 
Contains the summary result of importing tables, including table and column schemas of imported tables. ", - "$ref" : "#/components/schemas/ImportTablesResult", - "originalRef" : "#/components/schemas/ImportTablesResult" + "originalRef" : "#/components/schemas/ImportTablesResult", + "$ref" : "#/components/schemas/ImportTablesResult" }, "status" : { "type" : "string", @@ -78316,8 +78347,8 @@ "type" : "array", "description" : "Table schemas (including column schemas) of imported tables.", "items" : { - "$ref" : "#/components/schemas/TableSpec", - "originalRef" : "#/components/schemas/TableSpec" + "originalRef" : "#/components/schemas/TableSpec", + "$ref" : "#/components/schemas/TableSpec" } } }, @@ -78781,8 +78812,8 @@ "type" : "object", "description" : "Filtered notifications map with filter configuration and notification addresses treated with higher priority than those from the current class.", "additionalProperties" : { - "$ref" : "#/definitions/FilteredNotificationSpec", - "originalRef" : "#/definitions/FilteredNotificationSpec" + "originalRef" : "#/definitions/FilteredNotificationSpec", + "$ref" : "#/definitions/FilteredNotificationSpec" } } } @@ -78870,18 +78901,18 @@ "properties" : { "warningCounts" : { "description" : "Counts for the warning severity level.", - "$ref" : "#/components/schemas/IncidentCountsModel", - "originalRef" : "#/components/schemas/IncidentCountsModel" + "originalRef" : "#/components/schemas/IncidentCountsModel", + "$ref" : "#/components/schemas/IncidentCountsModel" }, "errorCounts" : { "description" : "Counts for the error severity level.", - "$ref" : "#/components/schemas/IncidentCountsModel", - "originalRef" : "#/components/schemas/IncidentCountsModel" + "originalRef" : "#/components/schemas/IncidentCountsModel", + "$ref" : "#/components/schemas/IncidentCountsModel" }, "fatalCounts" : { "description" : "Counts for the fatal severity level.", - "$ref" : "#/components/schemas/IncidentCountsModel", - "originalRef" : "#/components/schemas/IncidentCountsModel" + "originalRef" : "#/components/schemas/IncidentCountsModel", + "$ref" : "#/components/schemas/IncidentCountsModel" } } } @@ -79055,8 +79086,8 @@ "type" : "object", "description" : "A map of the numbers of data quality issues per day, the day uses the DQOps server timezone.", "additionalProperties" : { - "$ref" : "#/definitions/HistogramDailyIssuesCount", - "originalRef" : "#/definitions/HistogramDailyIssuesCount" + "originalRef" : "#/definitions/HistogramDailyIssuesCount", + "$ref" : "#/definitions/HistogramDailyIssuesCount" } }, "columns" : { @@ -80137,8 +80168,8 @@ }, "single_store_db_parameters_spec" : { "description" : "Single Store DB parameters spec.", - "$ref" : "#/components/schemas/SingleStoreDbParametersSpec", - "originalRef" : "#/components/schemas/SingleStoreDbParametersSpec" + "originalRef" : "#/components/schemas/SingleStoreDbParametersSpec", + "$ref" : "#/components/schemas/SingleStoreDbParametersSpec" }, "mysql_engine_type" : { "type" : "string", @@ -80557,7 +80588,7 @@ "display_hint" : { "type" : "string", "description" : "UI control display hint.", - "enum" : [ "textarea", "column_names" ] + "enum" : [ "textarea", "column_names", "requires_paid_version" ] }, "required" : { "type" : "boolean", @@ -81097,7 +81128,7 @@ "provider_type" : { "type" : "string", "description" : "Provider type.", - "enum" : [ "bigquery", "databricks", "mysql", "oracle", "postgresql", "duckdb", "presto", "redshift", "snowflake", "spark", "sqlserver", "trino", "hana", "db2", "mariadb", "clickhouse", "questdb" ] + 
"enum" : [ "bigquery", "clickhouse", "databricks", "db2", "duckdb", "hana", "mariadb", "mysql", "oracle", "postgresql", "presto", "questdb", "redshift", "snowflake", "spark", "sqlserver", "teradata", "trino" ] }, "custom" : { "type" : "boolean", @@ -81179,12 +81210,12 @@ "providerType" : { "type" : "string", "description" : "Provider type.", - "enum" : [ "bigquery", "databricks", "mysql", "oracle", "postgresql", "duckdb", "presto", "redshift", "snowflake", "spark", "sqlserver", "trino", "hana", "db2", "mariadb", "clickhouse", "questdb" ] + "enum" : [ "bigquery", "clickhouse", "databricks", "db2", "duckdb", "hana", "mariadb", "mysql", "oracle", "postgresql", "presto", "questdb", "redshift", "snowflake", "spark", "sqlserver", "teradata", "trino" ] }, "providerSensorDefinitionSpec" : { "description" : "Provider specific sensor definition specification", - "$ref" : "#/components/schemas/ProviderSensorDefinitionSpec", - "originalRef" : "#/components/schemas/ProviderSensorDefinitionSpec" + "originalRef" : "#/components/schemas/ProviderSensorDefinitionSpec", + "$ref" : "#/components/schemas/ProviderSensorDefinitionSpec" }, "sqlTemplate" : { "type" : "string", @@ -81294,19 +81325,19 @@ "type" : "array", "description" : "List of data quality checks within the category.", "items" : { - "$ref" : "#/components/schemas/CheckModel", - "originalRef" : "#/components/schemas/CheckModel" + "originalRef" : "#/components/schemas/CheckModel", + "$ref" : "#/components/schemas/CheckModel" } }, "run_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to start the job.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "data_clean_job_template" : { "description" : "Configured parameters for the \"data clean\" job that after being supplied with a time range should be pushed to the job queue in order to remove stored results connected with this quality category.", - "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters", - "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters" + "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters", + "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters" } }, "description" : "Model that returns the form definition and the form data to edit all checks within a single category." 
@@ -81680,23 +81711,23 @@ "type" : "object", "description" : "A dictionary of nested folders with rules, the keys are the folder names.", "additionalProperties" : { - "$ref" : "#/definitions/RuleFolderModel", - "originalRef" : "#/definitions/RuleFolderModel" + "originalRef" : "#/definitions/RuleFolderModel", + "$ref" : "#/definitions/RuleFolderModel" } }, "rules" : { "type" : "array", "description" : "List of rules defined in this folder.", "items" : { - "$ref" : "#/components/schemas/RuleListModel", - "originalRef" : "#/components/schemas/RuleListModel" + "originalRef" : "#/components/schemas/RuleListModel", + "$ref" : "#/components/schemas/RuleListModel" } }, "all_rules" : { "type" : "array", "items" : { - "$ref" : "#/components/schemas/RuleListModel", - "originalRef" : "#/components/schemas/RuleListModel" + "originalRef" : "#/components/schemas/RuleListModel", + "$ref" : "#/components/schemas/RuleListModel" } } }, @@ -81908,15 +81939,15 @@ }, "time_window" : { "description" : "Rule time window configuration when the mode is previous_readouts. Configures the number of past time windows (sensor readouts) that are passes as a parameter to the rule. For example, to calculate the average or perform prediction on historic data.", - "$ref" : "#/components/schemas/RuleTimeWindowSettingsSpec", - "originalRef" : "#/components/schemas/RuleTimeWindowSettingsSpec" + "originalRef" : "#/components/schemas/RuleTimeWindowSettingsSpec", + "$ref" : "#/components/schemas/RuleTimeWindowSettingsSpec" }, "fields" : { "type" : "array", "description" : "List of fields that are parameters of a custom rule. Those fields are used by the DQOps UI to display the data quality check editing screens with proper UI controls for all required fields.", "items" : { - "$ref" : "#/components/schemas/ParameterDefinitionSpec", - "originalRef" : "#/components/schemas/ParameterDefinitionSpec" + "originalRef" : "#/components/schemas/ParameterDefinitionSpec", + "$ref" : "#/components/schemas/ParameterDefinitionSpec" } }, "parameters" : { @@ -81997,8 +82028,8 @@ "type" : "array", "description" : "List of fields for editing the rule parameters like thresholds.", "items" : { - "$ref" : "#/components/schemas/FieldModel", - "originalRef" : "#/components/schemas/FieldModel" + "originalRef" : "#/components/schemas/FieldModel", + "$ref" : "#/components/schemas/FieldModel" } }, "disabled" : { @@ -82040,18 +82071,18 @@ "properties" : { "error" : { "description" : "Rule parameters for the error severity rule.", - "$ref" : "#/components/schemas/RuleParametersModel", - "originalRef" : "#/components/schemas/RuleParametersModel" + "originalRef" : "#/components/schemas/RuleParametersModel", + "$ref" : "#/components/schemas/RuleParametersModel" }, "warning" : { "description" : "Rule parameters for the warning severity rule.", - "$ref" : "#/components/schemas/RuleParametersModel", - "originalRef" : "#/components/schemas/RuleParametersModel" + "originalRef" : "#/components/schemas/RuleParametersModel", + "$ref" : "#/components/schemas/RuleParametersModel" }, "fatal" : { "description" : "Rule parameters for the fatal severity rule.", - "$ref" : "#/components/schemas/RuleParametersModel", - "originalRef" : "#/components/schemas/RuleParametersModel" + "originalRef" : "#/components/schemas/RuleParametersModel", + "$ref" : "#/components/schemas/RuleParametersModel" } }, "description" : "Model that returns the form definition and the form data to edit a single rule with all three threshold levels (low, medium, high)." 
@@ -82178,18 +82209,18 @@ }, "table" : { "description" : "The full physical name (schema.table) of the target table.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "check_search_filters" : { "description" : "Target data quality checks filter.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "time_window_filter" : { "description" : "Optional time window filter, configures the time range that is analyzed or the number of recent days/months to analyze for day or month partitioned data.", - "$ref" : "#/components/schemas/TimeWindowFilterParameters", - "originalRef" : "#/components/schemas/TimeWindowFilterParameters" + "originalRef" : "#/components/schemas/TimeWindowFilterParameters", + "$ref" : "#/components/schemas/TimeWindowFilterParameters" }, "dummy_execution" : { "type" : "boolean", @@ -82206,8 +82237,8 @@ }, "run_checks_result" : { "description" : "The result of running the check, updated when the run checks job finishes. Contains the count of executed checks.", - "$ref" : "#/components/schemas/RunChecksResult", - "originalRef" : "#/components/schemas/RunChecksResult" + "originalRef" : "#/components/schemas/RunChecksResult", + "$ref" : "#/components/schemas/RunChecksResult" } }, "description" : "Run checks configuration for a job that will run checks on a single table, specifies the target table and the target checks that should be executed and an optional time window." @@ -82258,13 +82289,13 @@ "properties" : { "check_search_filters" : { "description" : "Target data quality checks filter.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "time_window_filter" : { "description" : "Optional time window filter, configures the time range that is analyzed or the number of recent days/months to analyze for day or month partitioned data.", - "$ref" : "#/components/schemas/TimeWindowFilterParameters", - "originalRef" : "#/components/schemas/TimeWindowFilterParameters" + "originalRef" : "#/components/schemas/TimeWindowFilterParameters", + "$ref" : "#/components/schemas/TimeWindowFilterParameters" }, "collect_error_samples" : { "type" : "boolean", @@ -82281,8 +82312,8 @@ }, "run_checks_result" : { "description" : "The result of running the check, updated when the run checks job finishes. Contains the count of executed checks.", - "$ref" : "#/components/schemas/RunChecksResult", - "originalRef" : "#/components/schemas/RunChecksResult" + "originalRef" : "#/components/schemas/RunChecksResult", + "$ref" : "#/components/schemas/RunChecksResult" } }, "description" : "Run checks configuration, specifies the target checks that should be executed and an optional time window." 
@@ -82314,13 +82345,13 @@ "properties" : { "jobId" : { "description" : "Job id that identifies a job that was started on the DQOps job queue.", - "$ref" : "#/components/schemas/DqoQueueJobId", - "originalRef" : "#/components/schemas/DqoQueueJobId" + "originalRef" : "#/components/schemas/DqoQueueJobId", + "$ref" : "#/components/schemas/DqoQueueJobId" }, "result" : { "description" : "Optional result object that is returned only when the wait parameter was true and the \"run checks\" job has finished. Contains the summary result of the data quality checks executed, including the severity of the most severe issue detected. The calling code (the data pipeline) can decide if further processing should be continued.", - "$ref" : "#/components/schemas/RunChecksResult", - "originalRef" : "#/components/schemas/RunChecksResult" + "originalRef" : "#/components/schemas/RunChecksResult", + "$ref" : "#/components/schemas/RunChecksResult" }, "status" : { "type" : "string", @@ -82547,38 +82578,38 @@ }, "run_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run all checks within this schema.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_profiling_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run profiling checks within this schema.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_monitoring_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run monitoring checks within this schema.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_partition_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run partition partitioned checks within this schema.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "collect_statistics_job_template" : { "description" : "Configured parameters for the \"collect statistics\" job that should be pushed to the job queue in order to run all statistics collectors within this schema.", - "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters", - "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters" + "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters", + "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters" }, "import_table_job_parameters" : { "description" : "Job parameters for the import tables job that will import all tables from this schema.", - "$ref" : "#/components/schemas/ImportTablesQueueJobParameters", - "originalRef" : "#/components/schemas/ImportTablesQueueJobParameters" + "originalRef" : 
"#/components/schemas/ImportTablesQueueJobParameters", + "$ref" : "#/components/schemas/ImportTablesQueueJobParameters" }, "data_clean_job_template" : { "description" : "Configured parameters for the \"data clean\" job that after being supplied with a time range should be pushed to the job queue in order to remove stored results connected with this schema.", - "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters", - "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters" + "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters", + "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters" }, "can_edit" : { "type" : "boolean", @@ -82654,8 +82685,8 @@ }, "importTableJobParameters" : { "description" : "Job parameters for the import tables job that will import all tables from this schema.", - "$ref" : "#/components/schemas/ImportTablesQueueJobParameters", - "originalRef" : "#/components/schemas/ImportTablesQueueJobParameters" + "originalRef" : "#/components/schemas/ImportTablesQueueJobParameters", + "$ref" : "#/components/schemas/ImportTablesQueueJobParameters" } }, "description" : "Schema remote model" @@ -82725,8 +82756,8 @@ "type" : "array", "description" : "List of fields that are parameters of a custom sensor. Those fields are used by the DQOps UI to display the data quality check editing screens with proper UI controls for all required fields.", "items" : { - "$ref" : "#/components/schemas/ParameterDefinitionSpec", - "originalRef" : "#/components/schemas/ParameterDefinitionSpec" + "originalRef" : "#/components/schemas/ParameterDefinitionSpec", + "$ref" : "#/components/schemas/ParameterDefinitionSpec" } }, "requires_event_timestamp" : { @@ -82805,23 +82836,23 @@ "type" : "object", "description" : "A dictionary of nested folders with sensors, the keys are the folder names.", "additionalProperties" : { - "$ref" : "#/definitions/SensorFolderModel", - "originalRef" : "#/definitions/SensorFolderModel" + "originalRef" : "#/definitions/SensorFolderModel", + "$ref" : "#/definitions/SensorFolderModel" } }, "sensors" : { "type" : "array", "description" : "List of sensors defined in this folder.", "items" : { - "$ref" : "#/components/schemas/SensorListModel", - "originalRef" : "#/components/schemas/SensorListModel" + "originalRef" : "#/components/schemas/SensorListModel", + "$ref" : "#/components/schemas/SensorListModel" } }, "all_sensors" : { "type" : "array", "items" : { - "$ref" : "#/components/schemas/SensorListModel", - "originalRef" : "#/components/schemas/SensorListModel" + "originalRef" : "#/components/schemas/SensorListModel", + "$ref" : "#/components/schemas/SensorListModel" } } }, @@ -82916,8 +82947,8 @@ "type" : "array", "description" : "List of provider (database) specific models.", "items" : { - "$ref" : "#/components/schemas/ProviderSensorListModel", - "originalRef" : "#/components/schemas/ProviderSensorListModel" + "originalRef" : "#/components/schemas/ProviderSensorListModel", + "$ref" : "#/components/schemas/ProviderSensorListModel" } }, "yaml_parsing_error" : { @@ -82996,15 +83027,15 @@ }, "sensor_definition_spec" : { "description" : "Sensor definition specification.", - "$ref" : "#/components/schemas/SensorDefinitionSpec", - "originalRef" : "#/components/schemas/SensorDefinitionSpec" + "originalRef" : "#/components/schemas/SensorDefinitionSpec", + "$ref" : "#/components/schemas/SensorDefinitionSpec" }, "provider_sensor_list" : { "type" : "array", "description" : "Provider sensors list with provider specific sensor 
definitions.", "items" : { - "$ref" : "#/components/schemas/ProviderSensorModel", - "originalRef" : "#/components/schemas/ProviderSensorModel" + "originalRef" : "#/components/schemas/ProviderSensorModel", + "$ref" : "#/components/schemas/ProviderSensorModel" } }, "custom" : { @@ -83332,8 +83363,8 @@ "type" : "array", "description" : "Sensor readout entries", "items" : { - "$ref" : "#/components/schemas/SensorReadoutEntryModel", - "originalRef" : "#/components/schemas/SensorReadoutEntryModel" + "originalRef" : "#/components/schemas/SensorReadoutEntryModel", + "$ref" : "#/components/schemas/SensorReadoutEntryModel" } } } @@ -84218,8 +84249,8 @@ "collectorsHierarchyIdsModels" : { "type" : "array", "items" : { - "$ref" : "#/components/schemas/HierarchyIdModel", - "originalRef" : "#/components/schemas/HierarchyIdModel" + "originalRef" : "#/components/schemas/HierarchyIdModel", + "$ref" : "#/components/schemas/HierarchyIdModel" } } } @@ -84567,8 +84598,8 @@ "properties" : { "jobId" : { "description" : "Job id that identifies a job that was started on the DQOps job queue.", - "$ref" : "#/components/schemas/DqoQueueJobId", - "originalRef" : "#/components/schemas/DqoQueueJobId" + "originalRef" : "#/components/schemas/DqoQueueJobId", + "$ref" : "#/components/schemas/DqoQueueJobId" }, "status" : { "type" : "string", @@ -84594,8 +84625,8 @@ "type" : "object", "properties" : { "synchronizationParameter" : { - "$ref" : "#/components/schemas/SynchronizeRootFolderParameters", - "originalRef" : "#/components/schemas/SynchronizeRootFolderParameters" + "originalRef" : "#/components/schemas/SynchronizeRootFolderParameters", + "$ref" : "#/components/schemas/SynchronizeRootFolderParameters" } } } @@ -84671,14 +84702,14 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_total_row_count_match_percent" : { "description" : "Verifies the total ow count of a tested table and compares it to a row count of a reference table. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec", + "$ref" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec" } } } @@ -84717,14 +84748,14 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_total_row_count_match_percent" : { "description" : "Verifies the total row count of a tested table and compares it to a row count of a reference table. 
Stores the most recent check result for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec", + "$ref" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec" } } } @@ -84763,14 +84794,14 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_total_row_count_match_percent" : { "description" : "Verifies that the total row count of the tested table matches the total row count of another (reference) table.", - "$ref" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec", - "originalRef" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec" + "originalRef" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec", + "$ref" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentCheckSpec" } } } @@ -84879,15 +84910,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -84924,23 +84955,23 @@ }, "parameters" : { "description" : "Data quality check parameters. 
Fill the parameters to provide the name of the referenced table.", - "$ref" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentSensorParametersSpec", + "$ref" : "#/components/schemas/TableAccuracyTotalRowCountMatchPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum percentage of difference of row count of a table column and of a row count of another table column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -85083,15 +85114,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -85128,23 +85159,23 @@ }, "parameters" : { "description" : "Table availability sensor parameters", - "$ref" : "#/components/schemas/TableAvailabilitySensorParametersSpec", - "originalRef" : "#/components/schemas/TableAvailabilitySensorParametersSpec" + "originalRef" : "#/components/schemas/TableAvailabilitySensorParametersSpec", + "$ref" : "#/components/schemas/TableAvailabilitySensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxFailuresRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxFailuresRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxFailuresRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxFailuresRule0ParametersSpec" }, "error" : { "description" : "Default alerting threshold with the maximum number of consecutive table availability issues that raises a data quality error (alert)", - "$ref" : "#/components/schemas/MaxFailuresRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxFailuresRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxFailuresRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxFailuresRule1ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxFailuresRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxFailuresRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxFailuresRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxFailuresRule5ParametersSpec" } } } @@ -85183,14 +85214,14 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_table_availability" : { "description" : "Verifies availability of a table in a monitored database using a simple query. Stores the most recent table availability status for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableAvailabilityCheckSpec", - "originalRef" : "#/components/schemas/TableAvailabilityCheckSpec" + "originalRef" : "#/components/schemas/TableAvailabilityCheckSpec", + "$ref" : "#/components/schemas/TableAvailabilityCheckSpec" } } } @@ -85229,14 +85260,14 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_table_availability" : { "description" : "Verifies availability of a table in a monitored database using a simple query. Stores the most recent table availability status for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableAvailabilityCheckSpec", - "originalRef" : "#/components/schemas/TableAvailabilityCheckSpec" + "originalRef" : "#/components/schemas/TableAvailabilityCheckSpec", + "$ref" : "#/components/schemas/TableAvailabilityCheckSpec" } } } @@ -85275,14 +85306,14 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_table_availability" : { "description" : "Verifies availability of a table in a monitored database using a simple query.", - "$ref" : "#/components/schemas/TableAvailabilityCheckSpec", - "originalRef" : "#/components/schemas/TableAvailabilityCheckSpec" + "originalRef" : "#/components/schemas/TableAvailabilityCheckSpec", + "$ref" : "#/components/schemas/TableAvailabilityCheckSpec" } } } @@ -85454,21 +85485,21 @@ }, "table" : { "description" : "Physical table name including the schema and table names.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "column_statistics" : { "type" : "array", "description" : "List of collected column level statistics for all columns.", "items" : { - "$ref" : "#/components/schemas/ColumnStatisticsModel", - "originalRef" : "#/components/schemas/ColumnStatisticsModel" + "originalRef" : "#/components/schemas/ColumnStatisticsModel", + "$ref" : "#/components/schemas/ColumnStatisticsModel" } }, "collect_column_statistics_job_template" : { "description" : "Configured parameters for the \"collect statistics\" job that should be pushed to the job queue in order to run all statistics collectors for all columns on this table.", - "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters", - "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters" + "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters", + "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters" }, "can_collect_statistics" : { "type" : "boolean", @@ -85582,15 +85613,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. 
Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -85627,23 +85658,23 @@ }, "parameters" : { "description" : "Column count data quality sensor.", - "$ref" : "#/components/schemas/TableColumnCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableColumnCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableColumnCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableColumnCountSensorParametersSpec" }, "warning" : { "description" : "Warning level threshold to raise a data quality incident with a warning severity level when the column count in the parent table and the reference table do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Error level threshold to raise a data quality incident with an error severity level when the column count in the parent table and the reference table do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Fatal level threshold to raise a data quality incident with a fatal severity level when the column count in the parent table and the reference table do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -85689,8 +85720,8 @@ "type" : "object", "description" : "The dictionary of comparison results between the tables for the specific column. The keys for the dictionary are check names. 
The values are summaries of the most recent comparison on this column.", "additionalProperties" : { - "$ref" : "#/definitions/ComparisonCheckResultModel", - "originalRef" : "#/definitions/ComparisonCheckResultModel" + "originalRef" : "#/definitions/ComparisonCheckResultModel", + "$ref" : "#/definitions/ComparisonCheckResultModel" } } }, @@ -85803,8 +85834,8 @@ }, "compared_table" : { "description" : "The schema and table name of the compared table that is verified.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "reference_connection" : { "type" : "string", @@ -85812,8 +85843,8 @@ }, "reference_table" : { "description" : "The schema and table name of the reference table that has the expected data.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "check_type" : { "type" : "string", @@ -85837,8 +85868,8 @@ "type" : "array", "description" : "List of column pairs from both the compared table and the reference table that are used in a GROUP BY clause for grouping both the compared table and the reference table (the source of truth). The columns are used in the next step of the table comparison to join the results of data groups (row counts, sums of columns) between the compared table and the reference table to compare the differences.", "items" : { - "$ref" : "#/components/schemas/TableComparisonGroupingColumnPairModel", - "originalRef" : "#/components/schemas/TableComparisonGroupingColumnPairModel" + "originalRef" : "#/components/schemas/TableComparisonGroupingColumnPairModel", + "$ref" : "#/components/schemas/TableComparisonGroupingColumnPairModel" } }, "can_edit" : { @@ -85955,8 +85986,8 @@ "type" : "array", "description" : "List of column pairs from both the compared table and the reference table that are used in a GROUP BY clause for grouping both the compared table and the reference table (the source of truth). The columns are used in the next step of the table comparison to join the results of data groups (row counts, sums of columns) between the compared table and the reference table to compare the differences.", "items" : { - "$ref" : "#/components/schemas/TableComparisonGroupingColumnsPairSpec", - "originalRef" : "#/components/schemas/TableComparisonGroupingColumnsPairSpec" + "originalRef" : "#/components/schemas/TableComparisonGroupingColumnsPairSpec", + "$ref" : "#/components/schemas/TableComparisonGroupingColumnsPairSpec" } } } @@ -86001,19 +86032,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_row_count_match" : { "description" : "Verifies that the row count of the tested (parent) table matches the row count of the reference table. Compares each group of data with a GROUP BY clause.
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec", - "originalRef" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec" + "originalRef" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec", + "$ref" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec" }, "daily_column_count_match" : { "description" : "Verifies that the column count of the tested (parent) table matches the column count of the reference table. Only one comparison result is returned, without data grouping. Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec", - "originalRef" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec" + "originalRef" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec", + "$ref" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec" } } } @@ -86052,14 +86083,14 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_row_count_match" : { "description" : "Verifies that the row count of the tested (parent) table matches the row count of the reference table. Compares each group of data with a GROUP BY clause on the time period (the daily partition) and all other data grouping columns. Stores the most recent captured value for each daily partition that was analyzed.", - "$ref" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec", - "originalRef" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec" + "originalRef" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec", + "$ref" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec" } } } @@ -86276,8 +86307,8 @@ }, "compared_table" : { "description" : "The schema and table name of the compared table that is verified.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "reference_connection" : { "type" : "string", @@ -86285,8 +86316,8 @@ }, "reference_table" : { "description" : "The schema and table name of the reference table that has the expected data.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "compared_table_filter" : { "type" : "string", @@ -86300,24 +86331,24 @@ "type" : "array", "description" : "List of column pairs from both the compared table and the reference table that are used in a GROUP BY clause for grouping both the compared table and the reference table (the source of truth). 
The columns are used in the next step of the table comparison to join the results of data groups (row counts, sums of columns) between the compared table and the reference table to compare the differences.", "items" : { - "$ref" : "#/components/schemas/TableComparisonGroupingColumnPairModel", - "originalRef" : "#/components/schemas/TableComparisonGroupingColumnPairModel" + "originalRef" : "#/components/schemas/TableComparisonGroupingColumnPairModel", + "$ref" : "#/components/schemas/TableComparisonGroupingColumnPairModel" } }, "default_compare_thresholds" : { "description" : "The template of the compare thresholds that should be applied to all comparisons when the comparison is enabled.", - "$ref" : "#/components/schemas/CompareThresholdsModel", - "originalRef" : "#/components/schemas/CompareThresholdsModel" + "originalRef" : "#/components/schemas/CompareThresholdsModel", + "$ref" : "#/components/schemas/CompareThresholdsModel" }, "compare_row_count" : { "description" : "The row count comparison configuration.", - "$ref" : "#/components/schemas/CompareThresholdsModel", - "originalRef" : "#/components/schemas/CompareThresholdsModel" + "originalRef" : "#/components/schemas/CompareThresholdsModel", + "$ref" : "#/components/schemas/CompareThresholdsModel" }, "compare_column_count" : { "description" : "The column count comparison configuration.", - "$ref" : "#/components/schemas/CompareThresholdsModel", - "originalRef" : "#/components/schemas/CompareThresholdsModel" + "originalRef" : "#/components/schemas/CompareThresholdsModel", + "$ref" : "#/components/schemas/CompareThresholdsModel" }, "supports_compare_column_count" : { "type" : "boolean", @@ -86327,19 +86358,19 @@ "type" : "array", "description" : "The list of compared columns, their matching reference column and the enabled comparisons.", "items" : { - "$ref" : "#/components/schemas/ColumnComparisonModel", - "originalRef" : "#/components/schemas/ColumnComparisonModel" + "originalRef" : "#/components/schemas/ColumnComparisonModel", + "$ref" : "#/components/schemas/ColumnComparisonModel" } }, "compare_table_run_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run the table comparison checks for this table, using checks selected in this model.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "compare_table_clean_data_job_template" : { "description" : "Configured parameters for the \"data clean\" job that after being supplied with a time range should be pushed to the job queue in order to remove stored check results for this table comparison.", - "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters", - "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters" + "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters", + "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters" }, "can_edit" : { "type" : "boolean", @@ -86396,19 +86427,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section.
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_row_count_match" : { "description" : "Verifies that the row count of the tested (parent) table matches the row count of the reference table. Compares each group of data with a GROUP BY clause. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec", - "originalRef" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec" + "originalRef" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec", + "$ref" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec" }, "monthly_column_count_match" : { "description" : "Verifies that the column count of the tested (parent) table matches the column count of the reference table. Only one comparison result is returned, without data grouping. Stores the most recent captured value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec", - "originalRef" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec" + "originalRef" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec", + "$ref" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec" } } } @@ -86447,14 +86478,14 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_row_count_match" : { "description" : "Verifies that the row count of the tested (parent) table matches the row count of the reference table, for each monthly partition (grouping rows by the time period, truncated to the month). Compares each group of data with a GROUP BY clause. Stores the most recent captured value for each monthly partition and optionally data groups.", - "$ref" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec", - "originalRef" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec" + "originalRef" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec", + "$ref" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec" } } } @@ -86498,19 +86529,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_row_count_match" : { "description" : "Verifies that the row count of the tested (parent) table matches the row count of the reference table. 
Compares each group of data with a GROUP BY clause.", - "$ref" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec", - "originalRef" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec" + "originalRef" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec", + "$ref" : "#/components/schemas/TableComparisonRowCountMatchCheckSpec" }, "profile_column_count_match" : { "description" : "Verifies that the column count of the tested (parent) table matches the column count of the reference table. Only one comparison result is returned, without data grouping.", - "$ref" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec", - "originalRef" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec" + "originalRef" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec", + "$ref" : "#/components/schemas/TableComparisonColumnCountMatchCheckSpec" } } } @@ -86561,16 +86592,16 @@ "type" : "object", "description" : "The dictionary of comparison results between the tables for table level comparisons (e.g. row count). The keys for the dictionary are the check names. The value in the dictionary is a summary information about the most recent comparison.", "additionalProperties" : { - "$ref" : "#/definitions/ComparisonCheckResultModel", - "originalRef" : "#/definitions/ComparisonCheckResultModel" + "originalRef" : "#/definitions/ComparisonCheckResultModel", + "$ref" : "#/definitions/ComparisonCheckResultModel" } }, "column_comparison_results" : { "type" : "object", "description" : "The dictionary of comparison results between the tables for each compared column. The keys for the dictionary are the column names. The values are dictionaries of the data quality check names and their results.", "additionalProperties" : { - "$ref" : "#/definitions/TableComparisonColumnResultsModel", - "originalRef" : "#/definitions/TableComparisonColumnResultsModel" + "originalRef" : "#/definitions/TableComparisonColumnResultsModel", + "$ref" : "#/definitions/TableComparisonColumnResultsModel" } } }, @@ -86681,15 +86712,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -86726,23 +86757,23 @@ }, "parameters" : { "description" : "Row count data quality sensor.", - "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" }, "warning" : { "description" : "Warning level threshold to raise a data quality incident with a warning severity level when the row count in the parent table and the reference table do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule0ParametersSpec" }, "error" : { "description" : "Error level threshold to raise a data quality incident with an error severity level when the row count in the parent table and the reference table do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule1ParametersSpec" }, "fatal" : { "description" : "Fatal level threshold to raise a data quality incident with a fatal severity level when the row count in the parent table and the reference table do not match. The alert is generated for every compared group of rows (when data grouping is enabled).", - "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxDiffPercentRule5ParametersSpec" } } } @@ -87008,24 +87039,24 @@ "type" : "object", "description" : "The dictionary of statuses for data quality checks. The keys are data quality check names, the values are the current data quality check statuses that describe the most current status.", "additionalProperties" : { - "$ref" : "#/definitions/CheckCurrentDataQualityStatusModel", - "originalRef" : "#/definitions/CheckCurrentDataQualityStatusModel" + "originalRef" : "#/definitions/CheckCurrentDataQualityStatusModel", + "$ref" : "#/definitions/CheckCurrentDataQualityStatusModel" } }, "columns" : { "type" : "object", "description" : "Dictionary of data statuses for all columns that have any known data quality results.
The keys in the dictionary are the column names.", "additionalProperties" : { - "$ref" : "#/definitions/ColumnCurrentDataQualityStatusModel", - "originalRef" : "#/definitions/ColumnCurrentDataQualityStatusModel" + "originalRef" : "#/definitions/ColumnCurrentDataQualityStatusModel", + "$ref" : "#/definitions/ColumnCurrentDataQualityStatusModel" } }, "dimensions" : { "type" : "object", "description" : "Dictionary of the current data quality statuses for each data quality dimension.", "additionalProperties" : { - "$ref" : "#/definitions/DimensionCurrentDataQualityStatusModel", - "originalRef" : "#/definitions/DimensionCurrentDataQualityStatusModel" + "originalRef" : "#/definitions/DimensionCurrentDataQualityStatusModel", + "$ref" : "#/definitions/DimensionCurrentDataQualityStatusModel" } }, "table_exist" : { @@ -87085,29 +87116,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_sql_condition_failed_on_table" : { "description" : "Verifies that a custom SQL expression is met for each row. Counts the number of rows where the expression is not satisfied, and raises an issue if too many failures were detected. This check is used also to compare values between columns: `{alias}.col_price > {alias}.col_tax`. Stores the most recent count of failed rows for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableSqlConditionFailedCheckSpec", - "originalRef" : "#/components/schemas/TableSqlConditionFailedCheckSpec" + "originalRef" : "#/components/schemas/TableSqlConditionFailedCheckSpec", + "$ref" : "#/components/schemas/TableSqlConditionFailedCheckSpec" }, "daily_sql_condition_passed_percent_on_table" : { "description" : "Verifies that a minimum percentage of rows passed a custom SQL condition (expression). Reference the current table by using tokens, for example: `{alias}.col_price > {alias}.col_tax`. Stores the most recent captured percentage for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec", - "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec" + "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec", + "$ref" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec" }, "daily_sql_aggregate_expression_on_table" : { "description" : "Verifies that a custom aggregated SQL expression (MIN, MAX, etc.) is not outside the expected range.
Stores the most recent captured value for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec", - "originalRef" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec" + "originalRef" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec", + "$ref" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec" }, "daily_import_custom_result_on_table" : { "description" : "Runs a custom query that retrieves a result of a data quality check performed in the data engineering, whose result (the severity level) is pulled from a separate table.", - "$ref" : "#/components/schemas/TableSqlImportCustomResultCheckSpec", - "originalRef" : "#/components/schemas/TableSqlImportCustomResultCheckSpec" + "originalRef" : "#/components/schemas/TableSqlImportCustomResultCheckSpec", + "$ref" : "#/components/schemas/TableSqlImportCustomResultCheckSpec" } } } @@ -87161,29 +87192,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_sql_condition_failed_on_table" : { "description" : "Verifies that a custom SQL expression is met for each row. Counts the number of rows where the expression is not satisfied, and raises an issue if too many failures were detected. This check is used also to compare values between columns: `{alias}.col_price > {alias}.col_tax`. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/TableSqlConditionFailedCheckSpec", - "originalRef" : "#/components/schemas/TableSqlConditionFailedCheckSpec" + "originalRef" : "#/components/schemas/TableSqlConditionFailedCheckSpec", + "$ref" : "#/components/schemas/TableSqlConditionFailedCheckSpec" }, "daily_partition_sql_condition_passed_percent_on_table" : { "description" : "Verifies that a minimum percentage of rows passed a custom SQL condition (expression). Reference the current table by using tokens, for example: `{alias}.col_price > {alias}.col_tax`. Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec", - "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec" + "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec", + "$ref" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec" }, "daily_partition_sql_aggregate_expression_on_table" : { "description" : "Verifies that a custom aggregated SQL expression (MIN, MAX, etc.) is not outside the expected range. 
Stores a separate data quality check result for each daily partition.", - "$ref" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec", - "originalRef" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec" + "originalRef" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec", + "$ref" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec" }, "daily_partition_import_custom_result_on_table" : { "description" : "Runs a custom query that retrieves a result of a data quality check performed in the data engineering, whose result (the severity level) is pulled from a separate table.", - "$ref" : "#/components/schemas/TableSqlImportCustomResultCheckSpec", - "originalRef" : "#/components/schemas/TableSqlImportCustomResultCheckSpec" + "originalRef" : "#/components/schemas/TableSqlImportCustomResultCheckSpec", + "$ref" : "#/components/schemas/TableSqlImportCustomResultCheckSpec" } } } @@ -87237,29 +87268,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_sql_condition_failed_on_table" : { "description" : "Verifies that a custom SQL expression is met for each row. Counts the number of rows where the expression is not satisfied, and raises an issue if too many failures were detected. This check is used also to compare values between columns: `{alias}.col_price > {alias}.col_tax`. Stores the most recent count of failed rows for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableSqlConditionFailedCheckSpec", - "originalRef" : "#/components/schemas/TableSqlConditionFailedCheckSpec" + "originalRef" : "#/components/schemas/TableSqlConditionFailedCheckSpec", + "$ref" : "#/components/schemas/TableSqlConditionFailedCheckSpec" }, "monthly_sql_condition_passed_percent_on_table" : { "description" : "Verifies that a minimum percentage of rows passed a custom SQL condition (expression). Reference the current table by using tokens, for example: `{alias}.col_price > {alias}.col_tax`. Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec", - "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec" + "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec", + "$ref" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec" }, "monthly_sql_aggregate_expression_on_table" : { "description" : "Verifies that a custom aggregated SQL expression (MIN, MAX, etc.) is not outside the expected range. 
Stores the most recent value for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec", - "originalRef" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec" + "originalRef" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec", + "$ref" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec" }, "monthly_import_custom_result_on_table" : { "description" : "Runs a custom query that retrieves a result of a data quality check performed in the data engineering, whose result (the severity level) is pulled from a separate table.", - "$ref" : "#/components/schemas/TableSqlImportCustomResultCheckSpec", - "originalRef" : "#/components/schemas/TableSqlImportCustomResultCheckSpec" + "originalRef" : "#/components/schemas/TableSqlImportCustomResultCheckSpec", + "$ref" : "#/components/schemas/TableSqlImportCustomResultCheckSpec" } } } @@ -87313,29 +87344,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_sql_condition_failed_on_table" : { "description" : "Verifies that a custom SQL expression is met for each row. Counts the number of rows where the expression is not satisfied, and raises an issue if too many failures were detected. This check is used also to compare values between columns: `{alias}.col_price > {alias}.col_tax`. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/TableSqlConditionFailedCheckSpec", - "originalRef" : "#/components/schemas/TableSqlConditionFailedCheckSpec" + "originalRef" : "#/components/schemas/TableSqlConditionFailedCheckSpec", + "$ref" : "#/components/schemas/TableSqlConditionFailedCheckSpec" }, "monthly_partition_sql_condition_passed_percent_on_table" : { "description" : "Verifies that a minimum percentage of rows passed a custom SQL condition (expression). Reference the current table by using tokens, for example: `{alias}.col_price > {alias}.col_tax`. Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec", - "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec" + "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec", + "$ref" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec" }, "monthly_partition_sql_aggregate_expression_on_table" : { "description" : "Verifies that a custom aggregated SQL expression (MIN, MAX, etc.) is not outside the expected range. 
Stores a separate data quality check result for each monthly partition.", - "$ref" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec", - "originalRef" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec" + "originalRef" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec", + "$ref" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec" }, "monthly_partition_import_custom_result_on_table" : { "description" : "Runs a custom query that retrieves a result of a data quality check performed in the data engineering, whose result (the severity level) is pulled from a separate table.", - "$ref" : "#/components/schemas/TableSqlImportCustomResultCheckSpec", - "originalRef" : "#/components/schemas/TableSqlImportCustomResultCheckSpec" + "originalRef" : "#/components/schemas/TableSqlImportCustomResultCheckSpec", + "$ref" : "#/components/schemas/TableSqlImportCustomResultCheckSpec" } } } @@ -87389,29 +87420,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_sql_condition_failed_on_table" : { "description" : "Verifies that a custom SQL expression is met for each row. Counts the number of rows where the expression is not satisfied, and raises an issue if too many failures were detected. This check is used also to compare values between columns: `{alias}.col_price > {alias}.col_tax`.", - "$ref" : "#/components/schemas/TableSqlConditionFailedCheckSpec", - "originalRef" : "#/components/schemas/TableSqlConditionFailedCheckSpec" + "originalRef" : "#/components/schemas/TableSqlConditionFailedCheckSpec", + "$ref" : "#/components/schemas/TableSqlConditionFailedCheckSpec" }, "profile_sql_condition_passed_percent_on_table" : { "description" : "Verifies that a minimum percentage of rows passed a custom SQL condition (expression). Reference the current table by using tokens, for example: `{alias}.col_price > {alias}.col_tax`.", - "$ref" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec", - "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec" + "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec", + "$ref" : "#/components/schemas/TableSqlConditionPassedPercentCheckSpec" }, "profile_sql_aggregate_expression_on_table" : { "description" : "Verifies that a custom aggregated SQL expression (MIN, MAX, etc.)
is not outside the expected range.", - "$ref" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec", - "originalRef" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec" + "originalRef" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec", + "$ref" : "#/components/schemas/TableSqlAggregateExpressionCheckSpec" }, "profile_import_custom_result_on_table" : { "description" : "Runs a custom query that retrieves a result of a data quality check performed in the data engineering, whose result (the severity level) is pulled from a separate table.", - "$ref" : "#/components/schemas/TableSqlImportCustomResultCheckSpec", - "originalRef" : "#/components/schemas/TableSqlImportCustomResultCheckSpec" + "originalRef" : "#/components/schemas/TableSqlImportCustomResultCheckSpec", + "$ref" : "#/components/schemas/TableSqlImportCustomResultCheckSpec" } } } @@ -87496,51 +87527,51 @@ "type" : "object", "description" : "Dictionary of custom checks. The keys are check names within this category.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "volume" : { "description" : "Daily monitoring volume data quality checks", - "$ref" : "#/components/schemas/TableVolumeDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableVolumeDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableVolumeDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableVolumeDailyMonitoringChecksSpec" }, "timeliness" : { "description" : "Daily monitoring timeliness checks", - "$ref" : "#/components/schemas/TableTimelinessDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableTimelinessDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableTimelinessDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableTimelinessDailyMonitoringChecksSpec" }, "accuracy" : { "description" : "Daily monitoring accuracy checks", - "$ref" : "#/components/schemas/TableAccuracyDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableAccuracyDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableAccuracyDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableAccuracyDailyMonitoringChecksSpec" }, "custom_sql" : { "description" : "Daily monitoring custom SQL checks", - "$ref" : "#/components/schemas/TableCustomSqlDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableCustomSqlDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableCustomSqlDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableCustomSqlDailyMonitoringChecksSpec" }, "availability" : { "description" : "Daily monitoring table availability checks", - "$ref" : "#/components/schemas/TableAvailabilityDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableAvailabilityDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableAvailabilityDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableAvailabilityDailyMonitoringChecksSpec" }, "schema" : { "description" : "Daily monitoring table schema checks", - "$ref" : "#/components/schemas/TableSchemaDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableSchemaDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableSchemaDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableSchemaDailyMonitoringChecksSpec" }, 
"uniqueness" : { "description" : "Daily monitoring uniqueness checks on a table level.", - "$ref" : "#/components/schemas/TableUniquenessDailyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableUniquenessDailyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableUniquenessDailyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableUniquenessDailyMonitoringChecksSpec" }, "comparisons" : { "type" : "object", "description" : "Dictionary of configuration of checks for table comparisons. The key that identifies each comparison must match the name of a data comparison that is configured on the parent table.", "additionalProperties" : { - "$ref" : "#/definitions/TableComparisonDailyMonitoringChecksSpec", - "originalRef" : "#/definitions/TableComparisonDailyMonitoringChecksSpec" + "originalRef" : "#/definitions/TableComparisonDailyMonitoringChecksSpec", + "$ref" : "#/definitions/TableComparisonDailyMonitoringChecksSpec" } } } @@ -87611,36 +87642,36 @@ "type" : "object", "description" : "Dictionary of custom checks. The keys are check names within this category.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "volume" : { "description" : "Volume daily partitioned data quality checks that verify the quality of every day of data separately", - "$ref" : "#/components/schemas/TableVolumeDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/TableVolumeDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/TableVolumeDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/TableVolumeDailyPartitionedChecksSpec" }, "timeliness" : { "description" : "Daily partitioned timeliness checks", - "$ref" : "#/components/schemas/TableTimelinessDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/TableTimelinessDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/TableTimelinessDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/TableTimelinessDailyPartitionedChecksSpec" }, "custom_sql" : { "description" : "Custom SQL daily partitioned data quality checks that verify the quality of every day of data separately", - "$ref" : "#/components/schemas/TableCustomSqlDailyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/TableCustomSqlDailyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/TableCustomSqlDailyPartitionedChecksSpec", + "$ref" : "#/components/schemas/TableCustomSqlDailyPartitionedChecksSpec" }, "uniqueness" : { "description" : "Daily partitioned uniqueness checks on a table level.", - "$ref" : "#/components/schemas/TableUniquenessDailyPartitionChecksSpec", - "originalRef" : "#/components/schemas/TableUniquenessDailyPartitionChecksSpec" + "originalRef" : "#/components/schemas/TableUniquenessDailyPartitionChecksSpec", + "$ref" : "#/components/schemas/TableUniquenessDailyPartitionChecksSpec" }, "comparisons" : { "type" : "object", "description" : "Dictionary of configuration of checks for table comparisons. 
The key that identifies each comparison must match the name of a data comparison that is configured on the parent table.", "additionalProperties" : { - "$ref" : "#/definitions/TableComparisonDailyPartitionedChecksSpec", - "originalRef" : "#/definitions/TableComparisonDailyPartitionedChecksSpec" + "originalRef" : "#/definitions/TableComparisonDailyPartitionedChecksSpec", + "$ref" : "#/definitions/TableComparisonDailyPartitionedChecksSpec" } } } @@ -87750,15 +87781,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -87795,23 +87826,23 @@ }, "parameters" : { "description" : "Max days since most recent event sensor parameters", - "$ref" : "#/components/schemas/TableTimelinessDataFreshnessSensorParametersSpec", - "originalRef" : "#/components/schemas/TableTimelinessDataFreshnessSensorParametersSpec" + "originalRef" : "#/components/schemas/TableTimelinessDataFreshnessSensorParametersSpec", + "$ref" : "#/components/schemas/TableTimelinessDataFreshnessSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyTimelinessDelayRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyTimelinessDelayRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyTimelinessDelayRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyTimelinessDelayRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for max days since most recent event that raises a data quality error (alert)", - "$ref" : "#/components/schemas/AnomalyTimelinessDelayRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyTimelinessDelayRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyTimelinessDelayRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyTimelinessDelayRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyTimelinessDelayRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyTimelinessDelayRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyTimelinessDelayRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyTimelinessDelayRuleFatal01PctParametersSpec" } } } @@ -87920,15 +87951,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. 
Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -87965,23 +87996,23 @@ }, "parameters" : { "description" : "Max days since most recent event sensor parameters", - "$ref" : "#/components/schemas/TableTimelinessDataFreshnessSensorParametersSpec", - "originalRef" : "#/components/schemas/TableTimelinessDataFreshnessSensorParametersSpec" + "originalRef" : "#/components/schemas/TableTimelinessDataFreshnessSensorParametersSpec", + "$ref" : "#/components/schemas/TableTimelinessDataFreshnessSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxDaysRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule1ParametersSpec" }, "error" : { "description" : "Default alerting threshold for max days since most recent event that raises a data quality error (alert)", - "$ref" : "#/components/schemas/MaxDaysRule2ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule2ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule2ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule2ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxDaysRule7ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule7ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule7ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule7ParametersSpec" } } } @@ -88090,15 +88121,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -88135,23 +88166,23 @@ }, "parameters" : { "description" : "Max number of days between event and ingestion sensor parameters", - "$ref" : "#/components/schemas/TableTimelinessDataIngestionDelaySensorParametersSpec", - "originalRef" : "#/components/schemas/TableTimelinessDataIngestionDelaySensorParametersSpec" + "originalRef" : "#/components/schemas/TableTimelinessDataIngestionDelaySensorParametersSpec", + "$ref" : "#/components/schemas/TableTimelinessDataIngestionDelaySensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxDaysRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule1ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a max number of days between event and ingestion check that raises a data quality error (alert)", - "$ref" : "#/components/schemas/MaxDaysRule2ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule2ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule2ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule2ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxDaysRule7ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule7ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule7ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule7ParametersSpec" } } } @@ -88260,15 +88291,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -88305,23 +88336,23 @@ }, "parameters" : { "description" : "Min number of days between event and ingestion sensor parameters", - "$ref" : "#/components/schemas/TableTimelinessDataStalenessSensorParametersSpec", - "originalRef" : "#/components/schemas/TableTimelinessDataStalenessSensorParametersSpec" + "originalRef" : "#/components/schemas/TableTimelinessDataStalenessSensorParametersSpec", + "$ref" : "#/components/schemas/TableTimelinessDataStalenessSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxDaysRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule1ParametersSpec" }, "error" : { "description" : "Default alerting threshold for a min number of days between event and ingestion check that raises a data quality error (alert)", - "$ref" : "#/components/schemas/MaxDaysRule2ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule2ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule2ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule2ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxDaysRule7ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule7ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule7ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule7ParametersSpec" } } } @@ -88430,15 +88461,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -88475,23 +88506,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/TableDuplicateRecordCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with nulls in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -88643,15 +88674,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -88688,23 +88719,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/TableDuplicateRecordPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordPercentSensorParametersSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows with nulls in a column that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec", - "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec" + "originalRef" : "#/components/schemas/MaxPercentRule5ParametersSpec", + "$ref" : "#/components/schemas/MaxPercentRule5ParametersSpec" } } } @@ -88849,28 +88880,28 @@ "properties" : { "source_table" : { "description" : "The source table.", - "$ref" : "#/components/schemas/DomainConnectionTableKey", - "originalRef" : "#/components/schemas/DomainConnectionTableKey" + "originalRef" : "#/components/schemas/DomainConnectionTableKey", + "$ref" : "#/components/schemas/DomainConnectionTableKey" }, "target_table" : { "description" : "The target table.", - "$ref" : "#/components/schemas/DomainConnectionTableKey", - "originalRef" : "#/components/schemas/DomainConnectionTableKey" + "originalRef" : "#/components/schemas/DomainConnectionTableKey", + "$ref" : "#/components/schemas/DomainConnectionTableKey" }, "source_table_quality_status" : { "description" : "The current data quality status of the source table.", - "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel", - "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel" + "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel", + "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel" }, "target_table_quality_status" : { "description" : "The current data quality status of the target table.", - "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel", - "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel" + "originalRef" : 
"#/components/schemas/TableCurrentDataQualityStatusModel", + "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel" }, "upstream_combined_quality_status" : { "description" : "The data quality status identified from the data quality status of all upstream tables and the target table.", - "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel", - "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel" + "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel", + "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel" }, "row_count" : { "type" : "integer", @@ -88922,20 +88953,20 @@ "properties" : { "relative_table" : { "description" : "The table for which the data lineage is generated.", - "$ref" : "#/components/schemas/DomainConnectionTableKey", - "originalRef" : "#/components/schemas/DomainConnectionTableKey" + "originalRef" : "#/components/schemas/DomainConnectionTableKey", + "$ref" : "#/components/schemas/DomainConnectionTableKey" }, "relative_table_cumulative_quality_status" : { "description" : "The data quality status of the reference table (in the middle of the data lineage) showing the highest severity problems detected on the reference table and all upstream tables from which some issues have come.", - "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel", - "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel" + "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel", + "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel" }, "flows" : { "type" : "array", "description" : "A list of data flows from source tables to direct target tables. Describes the data quality status of the source table.", "items" : { - "$ref" : "#/components/schemas/TableLineageFlowModel", - "originalRef" : "#/components/schemas/TableLineageFlowModel" + "originalRef" : "#/components/schemas/TableLineageFlowModel", + "$ref" : "#/components/schemas/TableLineageFlowModel" } }, "data_lineage_fully_loaded" : { @@ -89043,8 +89074,8 @@ "type" : "object", "description" : "Configuration of source columns for each column in the current table. The keys in this dictionary are column names in the current table. The object stored in the dictionary contain a list of source columns.", "additionalProperties" : { - "$ref" : "#/definitions/ColumnLineageSourceSpec", - "originalRef" : "#/definitions/ColumnLineageSourceSpec" + "originalRef" : "#/definitions/ColumnLineageSourceSpec", + "$ref" : "#/definitions/ColumnLineageSourceSpec" } } } @@ -89179,8 +89210,8 @@ }, "table_data_quality_status" : { "description" : "The current data quality status for the table, grouped by data quality dimensions. DQOps may return a null value when the results were not yet loaded into the cache. In that case, the client should wait a few seconds and retry a call to get the most recent data quality status of the table.", - "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel", - "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel" + "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel", + "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel" } }, "description" : "Data lineage model that describes one source or target table of the current table." 
@@ -89419,8 +89450,8 @@ }, "target" : { "description" : "Physical table details (a physical schema name and a physical table name).", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "labels" : { "type" : "array", @@ -89456,8 +89487,8 @@ }, "owner" : { "description" : "Table owner information like the data steward name or the business application name.", - "$ref" : "#/components/schemas/TableOwnerSpec", - "originalRef" : "#/components/schemas/TableOwnerSpec" + "originalRef" : "#/components/schemas/TableOwnerSpec", + "$ref" : "#/components/schemas/TableOwnerSpec" }, "profiling_checks_result_truncation" : { "type" : "string", @@ -89466,13 +89497,13 @@ }, "file_format" : { "description" : "File format for a file based table, such as a CSV or Parquet file.", - "$ref" : "#/components/schemas/FileFormatSpec", - "originalRef" : "#/components/schemas/FileFormatSpec" + "originalRef" : "#/components/schemas/FileFormatSpec", + "$ref" : "#/components/schemas/FileFormatSpec" }, "data_quality_status" : { "description" : "The current data quality status for the table, grouped by data quality dimensions. DQOps may return a null value when the results were not yet loaded into the cache. In that case, the client should wait a few seconds and retry a call to get the most recent data quality status of the table.", - "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel", - "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel" + "originalRef" : "#/components/schemas/TableCurrentDataQualityStatusModel", + "$ref" : "#/components/schemas/TableCurrentDataQualityStatusModel" }, "has_any_configured_checks" : { "type" : "boolean", @@ -89496,33 +89527,33 @@ }, "run_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run all checks within this table.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_profiling_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run profiling checks within this table.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_monitoring_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run monitoring checks within this table.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : "#/components/schemas/CheckSearchFilters" }, "run_partition_checks_job_template" : { "description" : "Configured parameters for the \"check run\" job that should be pushed to the job queue in order to run partition partitioned checks within this table.", - "$ref" : "#/components/schemas/CheckSearchFilters", - "originalRef" : "#/components/schemas/CheckSearchFilters" + "originalRef" : "#/components/schemas/CheckSearchFilters", + "$ref" : 
"#/components/schemas/CheckSearchFilters" }, "collect_statistics_job_template" : { "description" : "Configured parameters for the \"collect statistics\" job that should be pushed to the job queue in order to run all statistics collectors within this table.", - "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters", - "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters" + "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters", + "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters" }, "data_clean_job_template" : { "description" : "Configured parameters for the \"data clean\" job that after being supplied with a time range should be pushed to the job queue in order to remove stored results connected with this table.", - "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters", - "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters" + "originalRef" : "#/components/schemas/DeleteStoredDataQueueJobParameters", + "$ref" : "#/components/schemas/DeleteStoredDataQueueJobParameters" }, "advanced_properties" : { "type" : "object", @@ -89610,8 +89641,8 @@ }, "spec" : { "description" : "Full table specification including all nested information, the table name is inside the 'target' property.", - "$ref" : "#/components/schemas/TableSpec", - "originalRef" : "#/components/schemas/TableSpec" + "originalRef" : "#/components/schemas/TableSpec", + "$ref" : "#/components/schemas/TableSpec" }, "can_edit" : { "type" : "boolean", @@ -89646,13 +89677,13 @@ "properties" : { "daily" : { "description" : "Configuration of daily monitoring evaluated at a table level.", - "$ref" : "#/components/schemas/TableDailyMonitoringCheckCategoriesSpec", - "originalRef" : "#/components/schemas/TableDailyMonitoringCheckCategoriesSpec" + "originalRef" : "#/components/schemas/TableDailyMonitoringCheckCategoriesSpec", + "$ref" : "#/components/schemas/TableDailyMonitoringCheckCategoriesSpec" }, "monthly" : { "description" : "Configuration of monthly monitoring evaluated at a table level.", - "$ref" : "#/components/schemas/TableMonthlyMonitoringCheckCategoriesSpec", - "originalRef" : "#/components/schemas/TableMonthlyMonitoringCheckCategoriesSpec" + "originalRef" : "#/components/schemas/TableMonthlyMonitoringCheckCategoriesSpec", + "$ref" : "#/components/schemas/TableMonthlyMonitoringCheckCategoriesSpec" } } } @@ -89737,51 +89768,51 @@ "type" : "object", "description" : "Dictionary of custom checks. 
The keys are check names within this category.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "volume" : { "description" : "Monthly monitoring of volume data quality checks", - "$ref" : "#/components/schemas/TableVolumeMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableVolumeMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableVolumeMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableVolumeMonthlyMonitoringChecksSpec" }, "timeliness" : { "description" : "Monthly monitoring of timeliness checks", - "$ref" : "#/components/schemas/TableTimelinessMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableTimelinessMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableTimelinessMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableTimelinessMonthlyMonitoringChecksSpec" }, "accuracy" : { "description" : "Monthly monitoring accuracy checks", - "$ref" : "#/components/schemas/TableAccuracyMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableAccuracyMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableAccuracyMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableAccuracyMonthlyMonitoringChecksSpec" }, "custom_sql" : { "description" : "Monthly monitoring of custom SQL checks", - "$ref" : "#/components/schemas/TableCustomSqlMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableCustomSqlMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableCustomSqlMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableCustomSqlMonthlyMonitoringChecksSpec" }, "availability" : { "description" : "Daily partitioned availability checks", - "$ref" : "#/components/schemas/TableAvailabilityMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableAvailabilityMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableAvailabilityMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableAvailabilityMonthlyMonitoringChecksSpec" }, "schema" : { "description" : "Monthly monitoring table schema checks", - "$ref" : "#/components/schemas/TableSchemaMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableSchemaMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableSchemaMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableSchemaMonthlyMonitoringChecksSpec" }, "uniqueness" : { "description" : "Monthly monitoring uniqueness checks on a table level.", - "$ref" : "#/components/schemas/TableUniquenessMonthlyMonitoringChecksSpec", - "originalRef" : "#/components/schemas/TableUniquenessMonthlyMonitoringChecksSpec" + "originalRef" : "#/components/schemas/TableUniquenessMonthlyMonitoringChecksSpec", + "$ref" : "#/components/schemas/TableUniquenessMonthlyMonitoringChecksSpec" }, "comparisons" : { "type" : "object", "description" : "Dictionary of configuration of checks for table comparisons. 
The key that identifies each comparison must match the name of a data comparison that is configured on the parent table.", "additionalProperties" : { - "$ref" : "#/definitions/TableComparisonMonthlyMonitoringChecksSpec", - "originalRef" : "#/definitions/TableComparisonMonthlyMonitoringChecksSpec" + "originalRef" : "#/definitions/TableComparisonMonthlyMonitoringChecksSpec", + "$ref" : "#/definitions/TableComparisonMonthlyMonitoringChecksSpec" } } } @@ -89852,36 +89883,36 @@ "type" : "object", "description" : "Dictionary of custom checks. The keys are check names within this category.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "volume" : { "description" : "Volume monthly partitioned data quality checks that verify the quality of every month of data separately", - "$ref" : "#/components/schemas/TableVolumeMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/TableVolumeMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/TableVolumeMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/TableVolumeMonthlyPartitionedChecksSpec" }, "timeliness" : { "description" : "Monthly partitioned timeliness checks", - "$ref" : "#/components/schemas/TableTimelinessMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/TableTimelinessMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/TableTimelinessMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/TableTimelinessMonthlyPartitionedChecksSpec" }, "custom_sql" : { "description" : "Custom SQL monthly partitioned data quality checks that verify the quality of every month of data separately", - "$ref" : "#/components/schemas/TableCustomSqlMonthlyPartitionedChecksSpec", - "originalRef" : "#/components/schemas/TableCustomSqlMonthlyPartitionedChecksSpec" + "originalRef" : "#/components/schemas/TableCustomSqlMonthlyPartitionedChecksSpec", + "$ref" : "#/components/schemas/TableCustomSqlMonthlyPartitionedChecksSpec" }, "uniqueness" : { "description" : "Monthly partitioned uniqueness checks on a table level.", - "$ref" : "#/components/schemas/TableUniquenessMonthlyPartitionChecksSpec", - "originalRef" : "#/components/schemas/TableUniquenessMonthlyPartitionChecksSpec" + "originalRef" : "#/components/schemas/TableUniquenessMonthlyPartitionChecksSpec", + "$ref" : "#/components/schemas/TableUniquenessMonthlyPartitionChecksSpec" }, "comparisons" : { "type" : "object", "description" : "Dictionary of configuration of checks for table comparisons. The key that identifies each comparison must match the name of a data comparison that is configured on the parent table.", "additionalProperties" : { - "$ref" : "#/definitions/TableComparisonMonthlyPartitionedChecksSpec", - "originalRef" : "#/definitions/TableComparisonMonthlyPartitionedChecksSpec" + "originalRef" : "#/definitions/TableComparisonMonthlyPartitionedChecksSpec", + "$ref" : "#/definitions/TableComparisonMonthlyPartitionedChecksSpec" } } } @@ -90025,15 +90056,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. 
Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -90070,23 +90101,23 @@ }, "parameters" : { "description" : "Partition reload lag sensor parameters", - "$ref" : "#/components/schemas/TableTimelinessPartitionReloadLagSensorParametersSpec", - "originalRef" : "#/components/schemas/TableTimelinessPartitionReloadLagSensorParametersSpec" + "originalRef" : "#/components/schemas/TableTimelinessPartitionReloadLagSensorParametersSpec", + "$ref" : "#/components/schemas/TableTimelinessPartitionReloadLagSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MaxDaysRule1ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule1ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule1ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule1ParametersSpec" }, "error" : { "description" : "Default alerting threshold for partition reload lag that raises a data quality error (alert)", - "$ref" : "#/components/schemas/MaxDaysRule2ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule2ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule2ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule2ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MaxDaysRule7ParametersSpec", - "originalRef" : "#/components/schemas/MaxDaysRule7ParametersSpec" + "originalRef" : "#/components/schemas/MaxDaysRule7ParametersSpec", + "$ref" : "#/components/schemas/MaxDaysRule7ParametersSpec" } } } @@ -90112,13 +90143,13 @@ "properties" : { "daily" : { "description" : "Configuration of day partitioned data quality checks evaluated at a table level.", - "$ref" : "#/components/schemas/TableDailyPartitionedCheckCategoriesSpec", - "originalRef" : "#/components/schemas/TableDailyPartitionedCheckCategoriesSpec" + "originalRef" : "#/components/schemas/TableDailyPartitionedCheckCategoriesSpec", + "$ref" : "#/components/schemas/TableDailyPartitionedCheckCategoriesSpec" }, "monthly" : { "description" : "Configuration of monthly partitioned data quality checks evaluated at a table level..", - "$ref" : "#/components/schemas/TableMonthlyPartitionedCheckCategoriesSpec", - "originalRef" : "#/components/schemas/TableMonthlyPartitionedCheckCategoriesSpec" + "originalRef" : "#/components/schemas/TableMonthlyPartitionedCheckCategoriesSpec", + "$ref" : "#/components/schemas/TableMonthlyPartitionedCheckCategoriesSpec" } } } @@ -90168,18 +90199,18 @@ }, "target" : { "description" : "Physical table details (a physical schema name and a physical table name)", - 
"$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "timestamp_columns" : { "description" : "Column names that store the timestamps that identify the event (transaction) timestamp and the ingestion (inserted / loaded at) timestamps. Also configures the timestamp source for the date/time partitioned data quality checks (event timestamp or ingestion timestamp).", - "$ref" : "#/components/schemas/TimestampColumnsSpec", - "originalRef" : "#/components/schemas/TimestampColumnsSpec" + "originalRef" : "#/components/schemas/TimestampColumnsSpec", + "$ref" : "#/components/schemas/TimestampColumnsSpec" }, "incremental_time_window" : { "description" : "Configuration of time windows for executing partition checks incrementally, configures the number of recent days to analyze for daily partitioned tables or the number of recent months for monthly partitioned data.", - "$ref" : "#/components/schemas/PartitionIncrementalTimeWindowSpec", - "originalRef" : "#/components/schemas/PartitionIncrementalTimeWindowSpec" + "originalRef" : "#/components/schemas/PartitionIncrementalTimeWindowSpec", + "$ref" : "#/components/schemas/PartitionIncrementalTimeWindowSpec" }, "can_edit" : { "type" : "boolean", @@ -90273,8 +90304,8 @@ "type" : "object", "description" : "Dictionary of custom checks. The keys are check names within this category.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "result_truncation" : { @@ -90284,45 +90315,45 @@ }, "volume" : { "description" : "Configuration of volume data quality checks on a table level.", - "$ref" : "#/components/schemas/TableVolumeProfilingChecksSpec", - "originalRef" : "#/components/schemas/TableVolumeProfilingChecksSpec" + "originalRef" : "#/components/schemas/TableVolumeProfilingChecksSpec", + "$ref" : "#/components/schemas/TableVolumeProfilingChecksSpec" }, "timeliness" : { "description" : "Configuration of timeliness checks on a table level. Timeliness checks detect anomalies like rapid row count changes.", - "$ref" : "#/components/schemas/TableTimelinessProfilingChecksSpec", - "originalRef" : "#/components/schemas/TableTimelinessProfilingChecksSpec" + "originalRef" : "#/components/schemas/TableTimelinessProfilingChecksSpec", + "$ref" : "#/components/schemas/TableTimelinessProfilingChecksSpec" }, "accuracy" : { "description" : "Configuration of accuracy checks on a table level. 
Accuracy checks compare the tested table with another reference table.", - "$ref" : "#/components/schemas/TableAccuracyProfilingChecksSpec", - "originalRef" : "#/components/schemas/TableAccuracyProfilingChecksSpec" + "originalRef" : "#/components/schemas/TableAccuracyProfilingChecksSpec", + "$ref" : "#/components/schemas/TableAccuracyProfilingChecksSpec" }, "custom_sql" : { "description" : "Configuration of data quality checks that are evaluating custom SQL conditions and aggregated expressions.", - "$ref" : "#/components/schemas/TableCustomSqlProfilingChecksSpec", - "originalRef" : "#/components/schemas/TableCustomSqlProfilingChecksSpec" + "originalRef" : "#/components/schemas/TableCustomSqlProfilingChecksSpec", + "$ref" : "#/components/schemas/TableCustomSqlProfilingChecksSpec" }, "availability" : { "description" : "Configuration of the table availability data quality checks on a table level.", - "$ref" : "#/components/schemas/TableAvailabilityProfilingChecksSpec", - "originalRef" : "#/components/schemas/TableAvailabilityProfilingChecksSpec" + "originalRef" : "#/components/schemas/TableAvailabilityProfilingChecksSpec", + "$ref" : "#/components/schemas/TableAvailabilityProfilingChecksSpec" }, "schema" : { "description" : "Configuration of schema (column count and schema) data quality checks on a table level.", - "$ref" : "#/components/schemas/TableSchemaProfilingChecksSpec", - "originalRef" : "#/components/schemas/TableSchemaProfilingChecksSpec" + "originalRef" : "#/components/schemas/TableSchemaProfilingChecksSpec", + "$ref" : "#/components/schemas/TableSchemaProfilingChecksSpec" }, "uniqueness" : { "description" : "Configuration of uniqueness checks on a table level.", - "$ref" : "#/components/schemas/TableUniquenessProfilingChecksSpec", - "originalRef" : "#/components/schemas/TableUniquenessProfilingChecksSpec" + "originalRef" : "#/components/schemas/TableUniquenessProfilingChecksSpec", + "$ref" : "#/components/schemas/TableUniquenessProfilingChecksSpec" }, "comparisons" : { "type" : "object", "description" : "Dictionary of configuration of checks for table comparisons. 
The key that identifies each comparison must match the name of a data comparison that is configured on the parent table.", "additionalProperties" : { - "$ref" : "#/definitions/TableComparisonProfilingChecksSpec", - "originalRef" : "#/definitions/TableComparisonProfilingChecksSpec" + "originalRef" : "#/definitions/TableComparisonProfilingChecksSpec", + "$ref" : "#/definitions/TableComparisonProfilingChecksSpec" } } } @@ -90404,8 +90435,8 @@ }, "target" : { "description" : "Physical table details (a physical schema name and a physical table name).", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "basic_statistics_collected" : { "type" : "boolean", @@ -90508,8 +90539,8 @@ }, "target_table" : { "description" : "The filters for the target table.", - "$ref" : "#/components/schemas/TargetTablePatternSpec", - "originalRef" : "#/components/schemas/TargetTablePatternSpec" + "originalRef" : "#/components/schemas/TargetTablePatternSpec", + "$ref" : "#/components/schemas/TargetTablePatternSpec" }, "can_edit" : { "type" : "boolean", @@ -90565,8 +90596,8 @@ }, "policy_spec" : { "description" : "The quality policy specification.", - "$ref" : "#/components/schemas/TableQualityPolicySpec", - "originalRef" : "#/components/schemas/TableQualityPolicySpec" + "originalRef" : "#/components/schemas/TableQualityPolicySpec", + "$ref" : "#/components/schemas/TableQualityPolicySpec" }, "can_edit" : { "type" : "boolean", @@ -90646,23 +90677,23 @@ }, "target" : { "description" : "The target table filter that are filtering the table and connection on which the default checks are applied.", - "$ref" : "#/components/schemas/TargetTablePatternSpec", - "originalRef" : "#/components/schemas/TargetTablePatternSpec" + "originalRef" : "#/components/schemas/TargetTablePatternSpec", + "$ref" : "#/components/schemas/TargetTablePatternSpec" }, "profiling_checks" : { "description" : "Configuration of data quality profiling checks that are enabled. Pick a check from a category, apply the parameters and rules to enable it.", - "$ref" : "#/components/schemas/TableProfilingCheckCategoriesSpec", - "originalRef" : "#/components/schemas/TableProfilingCheckCategoriesSpec" + "originalRef" : "#/components/schemas/TableProfilingCheckCategoriesSpec", + "$ref" : "#/components/schemas/TableProfilingCheckCategoriesSpec" }, "monitoring_checks" : { "description" : "Configuration of table level monitoring checks. Monitoring checks are data quality checks that are evaluated for each period of time (daily, weekly, monthly, etc.). A monitoring check stores only the most recent data quality check result for each period of time.", - "$ref" : "#/components/schemas/TableMonitoringCheckCategoriesSpec", - "originalRef" : "#/components/schemas/TableMonitoringCheckCategoriesSpec" + "originalRef" : "#/components/schemas/TableMonitoringCheckCategoriesSpec", + "$ref" : "#/components/schemas/TableMonitoringCheckCategoriesSpec" }, "partitioned_checks" : { "description" : "Configuration of table level date/time partitioned checks. Partitioned data quality checks are evaluated for each partition separately, raising separate alerts at a partition level. 
The table does not need to be physically partitioned by date, it is possible to run data quality checks for each day or month of data separately.", - "$ref" : "#/components/schemas/TablePartitionedCheckCategoriesSpec", - "originalRef" : "#/components/schemas/TablePartitionedCheckCategoriesSpec" + "originalRef" : "#/components/schemas/TablePartitionedCheckCategoriesSpec", + "$ref" : "#/components/schemas/TablePartitionedCheckCategoriesSpec" } } } @@ -90771,15 +90802,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -90816,23 +90847,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold for a set number of rows with negative value in a column that raises a data quality alert", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyDifferencingPercentileMovingAverageRuleFatal01PctParametersSpec" } } 
} @@ -90941,15 +90972,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -90986,23 +91017,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/AnomalyPartitionRowCountRuleWarning1PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyPartitionRowCountRuleWarning1PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyPartitionRowCountRuleWarning1PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyPartitionRowCountRuleWarning1PctParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/AnomalyPartitionRowCountRuleError05PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyPartitionRowCountRuleError05PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyPartitionRowCountRuleError05PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyPartitionRowCountRuleError05PctParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/AnomalyPartitionRowCountRuleFatal01PctParametersSpec", - "originalRef" : "#/components/schemas/AnomalyPartitionRowCountRuleFatal01PctParametersSpec" + "originalRef" : "#/components/schemas/AnomalyPartitionRowCountRuleFatal01PctParametersSpec", + "$ref" : "#/components/schemas/AnomalyPartitionRowCountRuleFatal01PctParametersSpec" } } } @@ -91111,15 +91142,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -91156,23 +91187,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent1DayRule50ParametersSpec" } } } @@ -91281,15 +91312,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -91326,23 +91357,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent30DaysRule50ParametersSpec" } } } @@ -91451,15 +91482,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -91496,23 +91527,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercent7DaysRule50ParametersSpec" } } } @@ -91621,15 +91652,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -91666,23 +91697,23 @@ }, "parameters" : { "description" : "Data quality check parameters", - "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule10ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule10ParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule20ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule20ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec", - "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec" + "originalRef" : "#/components/schemas/ChangePercentRule50ParametersSpec", + "$ref" : "#/components/schemas/ChangePercentRule50ParametersSpec" } } } @@ -91791,15 +91822,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -91836,23 +91867,23 @@ }, "parameters" : { "description" : "Row count sensor parameters", - "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/MinCountRule1ParametersSpec", - "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec" + "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec", + "$ref" : "#/components/schemas/MinCountRule1ParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/MinCountRule1ParametersSpec", - "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec" + "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec", + "$ref" : "#/components/schemas/MinCountRule1ParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/MinCountRule1ParametersSpec", - "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec" + "originalRef" : "#/components/schemas/MinCountRule1ParametersSpec", + "$ref" : "#/components/schemas/MinCountRule1ParametersSpec" } } } @@ -91961,15 +91992,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -92006,23 +92037,23 @@ }, "parameters" : { "description" : "Column count sensor parameters", - "$ref" : "#/components/schemas/TableColumnCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableColumnCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableColumnCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableColumnCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" } } } @@ -92131,15 +92162,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -92176,23 +92207,23 @@ }, "parameters" : { "description" : "Column count sensor parameters", - "$ref" : "#/components/schemas/TableColumnCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableColumnCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableColumnCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableColumnCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/EqualsIntegerRuleParametersSpec", - "originalRef" : "#/components/schemas/EqualsIntegerRuleParametersSpec" + "originalRef" : "#/components/schemas/EqualsIntegerRuleParametersSpec", + "$ref" : "#/components/schemas/EqualsIntegerRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/EqualsIntegerRuleParametersSpec", - "originalRef" : "#/components/schemas/EqualsIntegerRuleParametersSpec" + "originalRef" : "#/components/schemas/EqualsIntegerRuleParametersSpec", + "$ref" : "#/components/schemas/EqualsIntegerRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/EqualsIntegerRuleParametersSpec", - "originalRef" : "#/components/schemas/EqualsIntegerRuleParametersSpec" + "originalRef" : "#/components/schemas/EqualsIntegerRuleParametersSpec", + "$ref" : "#/components/schemas/EqualsIntegerRuleParametersSpec" } } } @@ -92224,8 +92255,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/TableColumnCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableColumnCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableColumnCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableColumnCountSensorParametersSpec" } } } @@ -92334,15 +92365,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -92379,23 +92410,23 @@ }, "parameters" : { "description" : "Column list hash sensor parameters", - "$ref" : "#/components/schemas/TableColumnListUnorderedHashSensorParametersSpec", - "originalRef" : "#/components/schemas/TableColumnListUnorderedHashSensorParametersSpec" + "originalRef" : "#/components/schemas/TableColumnListUnorderedHashSensorParametersSpec", + "$ref" : "#/components/schemas/TableColumnListUnorderedHashSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" } } } @@ -92504,15 +92535,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -92549,23 +92580,23 @@ }, "parameters" : { "description" : "Column list and order sensor parameters", - "$ref" : "#/components/schemas/TableColumnListOrderedHashSensorParametersSpec", - "originalRef" : "#/components/schemas/TableColumnListOrderedHashSensorParametersSpec" + "originalRef" : "#/components/schemas/TableColumnListOrderedHashSensorParametersSpec", + "$ref" : "#/components/schemas/TableColumnListOrderedHashSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" } } } @@ -92674,15 +92705,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -92719,23 +92750,23 @@ }, "parameters" : { "description" : "Column list and types sensor parameters", - "$ref" : "#/components/schemas/TableColumnTypesHashSensorParametersSpec", - "originalRef" : "#/components/schemas/TableColumnTypesHashSensorParametersSpec" + "originalRef" : "#/components/schemas/TableColumnTypesHashSensorParametersSpec", + "$ref" : "#/components/schemas/TableColumnTypesHashSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning that is considered as a passed data quality check", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold that raises a data quality issue at an error severity level", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue which indicates a serious data quality problem", - "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec", - "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec" + "originalRef" : "#/components/schemas/ValueChangedRuleParametersSpec", + "$ref" : "#/components/schemas/ValueChangedRuleParametersSpec" } } } @@ -92794,34 +92825,34 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_column_count" : { "description" : "Detects if the number of columns matches an expected number. Retrieves the metadata of the monitored table, counts the number of columns and compares it to an expected value (an expected number of columns). Stores the most recent column count for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableSchemaColumnCountCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnCountCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnCountCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnCountCheckSpec" }, "daily_column_count_changed" : { "description" : "Detects if the count of columns has changed since the most recent day. Retrieves the metadata of the monitored table, counts the number of columns and compares it to the last known column count that was captured when this data quality check was executed the last time. 
Stores the most recent column count for each day when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec" }, "daily_column_list_changed" : { "description" : "Detects if new columns were added or existing columns were removed since the most recent day. Retrieves the metadata of the monitored table and calculates an unordered hash of the column names. Compares the current hash to the previously known hash to detect any changes to the list of columns.", - "$ref" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec" }, "daily_column_list_or_order_changed" : { "description" : "Detects if new columns were added, existing columns were removed or the columns were reordered since the most recent day. Retrieves the metadata of the monitored table and calculates an ordered hash of the column names. Compares the current hash to the previously known hash to detect any changes to the list of columns or their order.", - "$ref" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec" }, "daily_column_types_changed" : { "description" : "Detects if new columns were added, removed or their data types have changed since the most recent day. Retrieves the metadata of the monitored table and calculates an unordered hash of the column names and the data types (including the length, scale, precision, nullability). Compares the current hash to the previously known hash to detect any changes to the list of columns or their types.", - "$ref" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec" } } } @@ -92880,34 +92911,34 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_column_count" : { "description" : "Detects if the number of columns matches an expected number. Retrieves the metadata of the monitored table, counts the number of columns and compares it to an expected value (an expected number of columns). 
Stores the most recent column count for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableSchemaColumnCountCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnCountCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnCountCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnCountCheckSpec" }, "monthly_column_count_changed" : { "description" : "Detects if the count of columns has changed since the last month. Retrieves the metadata of the monitored table, counts the number of columns and compares it to the last known column count that was captured when this data quality check was executed the last time. Stores the most recent column count for each month when the data quality check was evaluated.", - "$ref" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec" }, "monthly_column_list_changed" : { "description" : "Detects if new columns were added or existing columns were removed since the last month. Retrieves the metadata of the monitored table and calculates an unordered hash of the column names. Compares the current hash to the previously known hash to detect any changes to the list of columns.", - "$ref" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec" }, "monthly_column_list_or_order_changed" : { "description" : "Detects if new columns were added, existing columns were removed or the columns were reordered since the last month. Retrieves the metadata of the monitored table and calculates an ordered hash of the column names. Compares the current hash to the previously known hash to detect any changes to the list of columns or their order.", - "$ref" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec" }, "monthly_column_types_changed" : { "description" : "Detects if new columns were added, removed or their data types have changed since the last month. Retrieves the metadata of the monitored table and calculates an unordered hash of the column names and the data types (including the length, scale, precision, nullability). Compares the current hash to the previously known hash to detect any changes to the list of columns or their types.", - "$ref" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec" } } } @@ -92966,34 +92997,34 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_column_count" : { "description" : "Detects if the number of columns matches an expected number. Retrieves the metadata of the monitored table, counts the number of columns and compares it to an expected value (an expected number of columns).", - "$ref" : "#/components/schemas/TableSchemaColumnCountCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnCountCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnCountCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnCountCheckSpec" }, "profile_column_count_changed" : { "description" : "Detects if the count of columns has changed. Retrieves the metadata of the monitored table, counts the number of columns and compares it to the last known column count that was captured when this data quality check was executed the last time.", - "$ref" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnCountChangedCheckSpec" }, "profile_column_list_changed" : { "description" : "Detects if new columns were added or existing columns were removed. Retrieves the metadata of the monitored table and calculates an unordered hash of the column names. Compares the current hash to the previously known hash to detect any changes to the list of columns.", - "$ref" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnListChangedCheckSpec" }, "profile_column_list_or_order_changed" : { "description" : "Detects if new columns were added, existing columns were removed or the columns were reordered. Retrieves the metadata of the monitored table and calculates an ordered hash of the column names. Compares the current hash to the previously known hash to detect any changes to the list of columns or their order.", - "$ref" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnListOrOrderChangedCheckSpec" }, "profile_column_types_changed" : { "description" : "Detects if new columns were added, removed or their data types have changed. Retrieves the metadata of the monitored table and calculates an unordered hash of the column names and the data types (including the length, scale, precision, nullability). 
Compares the current hash to the previously known hash to detect any changes to the list of columns or their types.", - "$ref" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec", + "$ref" : "#/components/schemas/TableSchemaColumnTypesChangedCheckSpec" } } } @@ -93014,8 +93045,8 @@ "properties" : { "column_count" : { "description" : "Configuration of the column count profiler.", - "$ref" : "#/components/schemas/TableSchemaColumnCountStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/TableSchemaColumnCountStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/TableSchemaColumnCountStatisticsCollectorSpec", + "$ref" : "#/components/schemas/TableSchemaColumnCountStatisticsCollectorSpec" } } } @@ -93258,13 +93289,13 @@ }, "timestamp_columns" : { "description" : "Column names that store the timestamps that identify the event (transaction) timestamp and the ingestion (inserted / loaded at) timestamps. Also configures the timestamp source for the date/time partitioned data quality checks (event timestamp or ingestion timestamp).", - "$ref" : "#/components/schemas/TimestampColumnsSpec", - "originalRef" : "#/components/schemas/TimestampColumnsSpec" + "originalRef" : "#/components/schemas/TimestampColumnsSpec", + "$ref" : "#/components/schemas/TimestampColumnsSpec" }, "incremental_time_window" : { "description" : "Configuration of the time window for analyzing daily or monthly partitions. Specifies the number of recent days and recent months that are analyzed when the partitioned data quality checks are run in an incremental mode (the default mode).", - "$ref" : "#/components/schemas/PartitionIncrementalTimeWindowSpec", - "originalRef" : "#/components/schemas/PartitionIncrementalTimeWindowSpec" + "originalRef" : "#/components/schemas/PartitionIncrementalTimeWindowSpec", + "$ref" : "#/components/schemas/PartitionIncrementalTimeWindowSpec" }, "default_grouping_name" : { "type" : "string", @@ -93274,59 +93305,59 @@ "type" : "object", "description" : "Data grouping configurations list. Data grouping configurations are configured in two cases: (1) the data in the table should be analyzed with a GROUP BY condition, to analyze different datasets using separate time series, for example a table contains data from multiple countries and there is a 'country' column used for partitioning. (2) a tag is assigned to a table (within a data grouping level hierarchy), when the data is segmented at a table level (similar tables store the same information, but for different countries, etc.).", "additionalProperties" : { - "$ref" : "#/definitions/DataGroupingConfigurationSpec", - "originalRef" : "#/definitions/DataGroupingConfigurationSpec" + "originalRef" : "#/definitions/DataGroupingConfigurationSpec", + "$ref" : "#/definitions/DataGroupingConfigurationSpec" } }, "table_comparisons" : { "type" : "object", "description" : "Dictionary of data comparison configurations. Data comparison configurations are used for comparisons between data sources to compare this table (called the compared table) with other reference tables (the source of truth). The reference table's metadata must be imported into DQOps, but the reference table may be located in another data source. DQOps will compare metrics calculated for groups of rows (using the GROUP BY clause). For each comparison, the user must specify a name of a data grouping. 
The number of data grouping dimensions in the parent table and the reference table defined in the selected data grouping configurations must match. DQOps will run the same data quality sensors on both the parent table (table under test) and the reference table (the source of truth), comparing the measures (sensor readouts) captured from both tables.", "additionalProperties" : { - "$ref" : "#/definitions/TableComparisonConfigurationSpec", - "originalRef" : "#/definitions/TableComparisonConfigurationSpec" + "originalRef" : "#/definitions/TableComparisonConfigurationSpec", + "$ref" : "#/definitions/TableComparisonConfigurationSpec" } }, "incident_grouping" : { "description" : "Incident grouping configuration with the overridden configuration at a table level. The configured field value in this object will override the default configuration from the connection level. Incident grouping level can be changed or incident creation can be disabled.", - "$ref" : "#/components/schemas/TableIncidentGroupingSpec", - "originalRef" : "#/components/schemas/TableIncidentGroupingSpec" + "originalRef" : "#/components/schemas/TableIncidentGroupingSpec", + "$ref" : "#/components/schemas/TableIncidentGroupingSpec" }, "owner" : { "description" : "Table owner information like the data steward name or the business application name.", - "$ref" : "#/components/schemas/TableOwnerSpec", - "originalRef" : "#/components/schemas/TableOwnerSpec" + "originalRef" : "#/components/schemas/TableOwnerSpec", + "$ref" : "#/components/schemas/TableOwnerSpec" }, "profiling_checks" : { "description" : "Configuration of data quality profiling checks that are enabled. Pick a check from a category, apply the parameters and rules to enable it.", - "$ref" : "#/components/schemas/TableProfilingCheckCategoriesSpec", - "originalRef" : "#/components/schemas/TableProfilingCheckCategoriesSpec" + "originalRef" : "#/components/schemas/TableProfilingCheckCategoriesSpec", + "$ref" : "#/components/schemas/TableProfilingCheckCategoriesSpec" }, "monitoring_checks" : { "description" : "Configuration of table level monitoring checks. Monitoring checks are data quality checks that are evaluated for each period of time (daily, weekly, monthly, etc.). A monitoring check stores only the most recent data quality check result for each period of time.", - "$ref" : "#/components/schemas/TableMonitoringCheckCategoriesSpec", - "originalRef" : "#/components/schemas/TableMonitoringCheckCategoriesSpec" + "originalRef" : "#/components/schemas/TableMonitoringCheckCategoriesSpec", + "$ref" : "#/components/schemas/TableMonitoringCheckCategoriesSpec" }, "partitioned_checks" : { "description" : "Configuration of table level date/time partitioned checks. Partitioned data quality checks are evaluated for each partition separately, raising separate alerts at a partition level. The table does not need to be physically partitioned by date, it is possible to run data quality checks for each day or month of data separately.", - "$ref" : "#/components/schemas/TablePartitionedCheckCategoriesSpec", - "originalRef" : "#/components/schemas/TablePartitionedCheckCategoriesSpec" + "originalRef" : "#/components/schemas/TablePartitionedCheckCategoriesSpec", + "$ref" : "#/components/schemas/TablePartitionedCheckCategoriesSpec" }, "statistics" : { "description" : "Configuration of table level data statistics collector (a basic profiler). 
Configures which statistics collectors are enabled and how they are configured.", - "$ref" : "#/components/schemas/TableStatisticsCollectorsRootCategoriesSpec", - "originalRef" : "#/components/schemas/TableStatisticsCollectorsRootCategoriesSpec" + "originalRef" : "#/components/schemas/TableStatisticsCollectorsRootCategoriesSpec", + "$ref" : "#/components/schemas/TableStatisticsCollectorsRootCategoriesSpec" }, "schedules_override" : { "description" : "Configuration of the job scheduler that runs data quality checks. The scheduler configuration is divided into types of checks that have different schedules.", - "$ref" : "#/components/schemas/CronSchedulesSpec", - "originalRef" : "#/components/schemas/CronSchedulesSpec" + "originalRef" : "#/components/schemas/CronSchedulesSpec", + "$ref" : "#/components/schemas/CronSchedulesSpec" }, "columns" : { "type" : "object", "description" : "Dictionary of columns, indexed by a physical column name. Column specification contains the expected column data type and a list of column level data quality checks that are enabled for a column.", "additionalProperties" : { - "$ref" : "#/definitions/ColumnSpec", - "originalRef" : "#/definitions/ColumnSpec" + "originalRef" : "#/definitions/ColumnSpec", + "$ref" : "#/definitions/ColumnSpec" } }, "labels" : { @@ -93340,14 +93371,14 @@ "type" : "array", "description" : "Comments used for change tracking and documenting changes directly in the table data quality specification file.", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "file_format" : { "description" : "File format with the specification used as a source data. It overrides the connection spec's file format when it is set", - "$ref" : "#/components/schemas/FileFormatSpec", - "originalRef" : "#/components/schemas/FileFormatSpec" + "originalRef" : "#/components/schemas/FileFormatSpec", + "$ref" : "#/components/schemas/FileFormatSpec" }, "advanced_properties" : { "type" : "object", @@ -93360,8 +93391,8 @@ "type" : "array", "description" : "A list of source tables. This information is used to define the data lineage report for the table.", "items" : { - "$ref" : "#/components/schemas/TableLineageSourceSpec", - "originalRef" : "#/components/schemas/TableLineageSourceSpec" + "originalRef" : "#/components/schemas/TableLineageSourceSpec", + "$ref" : "#/components/schemas/TableLineageSourceSpec" } } } @@ -93471,15 +93502,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -93516,23 +93547,23 @@ }, "parameters" : { "description" : "Sensor parameters with the custom SQL aggregate expression that is evaluated on a table. Use an {alias} token to reference the tested table.", - "$ref" : "#/components/schemas/TableSqlAggregatedExpressionSensorParametersSpec", - "originalRef" : "#/components/schemas/TableSqlAggregatedExpressionSensorParametersSpec" + "originalRef" : "#/components/schemas/TableSqlAggregatedExpressionSensorParametersSpec", + "$ref" : "#/components/schemas/TableSqlAggregatedExpressionSensorParametersSpec" }, "warning" : { "description" : "Default alerting threshold for warnings raised when the aggregated value is above the maximum accepted value.", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "error" : { "description" : "Default alerting threshold for errors raised when the aggregated value is above the maximum accepted value.", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" }, "fatal" : { "description" : "Default alerting threshold for fatal data quality issues raised when the aggregated value is above the maximum accepted value.", - "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec", - "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec" + "originalRef" : "#/components/schemas/BetweenFloatsRuleParametersSpec", + "$ref" : "#/components/schemas/BetweenFloatsRuleParametersSpec" } } } @@ -93675,15 +93706,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -93720,23 +93751,23 @@ }, "parameters" : { "description" : "Sensor parameters with the custom SQL condition (an expression that returns true/false) which is evaluated on each row.", - "$ref" : "#/components/schemas/TableSqlConditionFailedCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableSqlConditionFailedCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableSqlConditionFailedCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableSqlConditionFailedCountSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning when a given number of rows failed the custom SQL condition (expression). The warning is considered as a passed data quality check.", - "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0WarningParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a maximum number of rows failing the custom SQL condition (expression) that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule0ErrorParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule0ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue when a given number of rows failed the custom SQL condition (expression). A fatal issue indicates a serious data quality problem that should result in stopping the data pipelines.", - "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec", - "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec" + "originalRef" : "#/components/schemas/MaxCountRule100ParametersSpec", + "$ref" : "#/components/schemas/MaxCountRule100ParametersSpec" } } } @@ -93879,15 +93910,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -93924,23 +93955,23 @@ }, "parameters" : { "description" : "Sensor parameters with the custom SQL condition (an expression that returns true/false) which is evaluated on each row", - "$ref" : "#/components/schemas/TableSqlConditionPassedPercentSensorParametersSpec", - "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentSensorParametersSpec" + "originalRef" : "#/components/schemas/TableSqlConditionPassedPercentSensorParametersSpec", + "$ref" : "#/components/schemas/TableSqlConditionPassedPercentSensorParametersSpec" }, "warning" : { "description" : "Alerting threshold that raises a data quality warning when a minimum acceptable percentage of rows did not pass the custom SQL condition (expression). The warning is considered as a passed data quality check.", - "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100WarningParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100WarningParametersSpec" }, "error" : { "description" : "Default alerting threshold for a minimum acceptable percentage of rows passing the custom SQL condition (expression) that raises a data quality error (alert).", - "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule100ErrorParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule100ErrorParametersSpec" }, "fatal" : { "description" : "Alerting threshold that raises a fatal data quality issue when a minimum acceptable percentage of rows did not pass the custom SQL condition (expression). A fatal issue indicates a serious data quality problem that should result in stopping the data pipelines.", - "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec", - "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec" + "originalRef" : "#/components/schemas/MinPercentRule95ParametersSpec", + "$ref" : "#/components/schemas/MinPercentRule95ParametersSpec" } } } @@ -94083,15 +94114,15 @@ "properties" : { "schedule_override" : { "description" : "Run check scheduling configuration. Specifies the schedule (a cron expression) when the data quality checks are executed by the scheduler.", - "$ref" : "#/components/schemas/CronScheduleSpec", - "originalRef" : "#/components/schemas/CronScheduleSpec" + "originalRef" : "#/components/schemas/CronScheduleSpec", + "$ref" : "#/components/schemas/CronScheduleSpec" }, "comments" : { "type" : "array", "description" : "Comments for change tracking. 
Please put comments in this collection because YAML comments may be removed when the YAML file is modified by the tool (serialization and deserialization will remove non tracked comments).", "items" : { - "$ref" : "#/components/schemas/CommentSpec", - "originalRef" : "#/components/schemas/CommentSpec" + "originalRef" : "#/components/schemas/CommentSpec", + "$ref" : "#/components/schemas/CommentSpec" } }, "disabled" : { @@ -94128,23 +94159,23 @@ }, "parameters" : { "description" : "Sensor parameters with the custom SQL SELECT statement that queries a log table to get a result of a custom query that retrieves results from other data quality libraries.", - "$ref" : "#/components/schemas/TableSqlImportCustomResultSensorParametersSpec", - "originalRef" : "#/components/schemas/TableSqlImportCustomResultSensorParametersSpec" + "originalRef" : "#/components/schemas/TableSqlImportCustomResultSensorParametersSpec", + "$ref" : "#/components/schemas/TableSqlImportCustomResultSensorParametersSpec" }, "warning" : { "description" : "Warning severity import rule. Activate the rule with no parameters to import custom data quality results when the custom query returns a value **1** in the *severity* result column.", - "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec", - "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec" + "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec", + "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec" }, "error" : { "description" : "Error severity import rule. Activate the rule with no parameters to import custom data quality results when the custom query returns a value **2** in the *severity* result column.", - "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec", - "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec" + "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec", + "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec" }, "fatal" : { "description" : "Fatal severity import rule. 
Activate the rule with no parameters to import custom data quality results when the custom query returns a value **3** in the *severity* result column.", - "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec", - "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec" + "originalRef" : "#/components/schemas/ImportSeverityRuleParametersSpec", + "$ref" : "#/components/schemas/ImportSeverityRuleParametersSpec" } } } @@ -94204,12 +94235,12 @@ "properties" : { "volume" : { "description" : "Configuration of volume statistics collectors on a table level.", - "$ref" : "#/components/schemas/TableVolumeStatisticsCollectorsSpec", - "originalRef" : "#/components/schemas/TableVolumeStatisticsCollectorsSpec" + "originalRef" : "#/components/schemas/TableVolumeStatisticsCollectorsSpec", + "$ref" : "#/components/schemas/TableVolumeStatisticsCollectorsSpec" }, "schema" : { - "$ref" : "#/components/schemas/TableSchemaStatisticsCollectorsSpec", - "originalRef" : "#/components/schemas/TableSchemaStatisticsCollectorsSpec" + "originalRef" : "#/components/schemas/TableSchemaStatisticsCollectorsSpec", + "$ref" : "#/components/schemas/TableSchemaStatisticsCollectorsSpec" } } } @@ -94271,26 +94302,26 @@ }, "table" : { "description" : "Physical table name including the schema and table names.", - "$ref" : "#/components/schemas/PhysicalTableName", - "originalRef" : "#/components/schemas/PhysicalTableName" + "originalRef" : "#/components/schemas/PhysicalTableName", + "$ref" : "#/components/schemas/PhysicalTableName" }, "statistics" : { "type" : "array", "description" : "List of collected table level statistics.", "items" : { - "$ref" : "#/components/schemas/StatisticsMetricModel", - "originalRef" : "#/components/schemas/StatisticsMetricModel" + "originalRef" : "#/components/schemas/StatisticsMetricModel", + "$ref" : "#/components/schemas/StatisticsMetricModel" } }, "collect_table_statistics_job_template" : { "description" : "Configured parameters for the \"collect statistics\" job that should be pushed to the job queue in order to run all statistics collectors within this table, limited only to the table level statistics (row count, etc).", - "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters", - "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters" + "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters", + "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters" }, "collect_table_and_column_statistics_job_template" : { "description" : "Configured parameters for the \"collect statistics\" job that should be pushed to the job queue in order to run all statistics collectors within this table, including statistics for all columns.", - "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters", - "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters" + "originalRef" : "#/components/schemas/StatisticsCollectorSearchFilters", + "$ref" : "#/components/schemas/StatisticsCollectorSearchFilters" }, "can_collect_statistics" : { "type" : "boolean", @@ -94349,29 +94380,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_data_freshness" : { "description" : "Daily monitoring calculating the number of days since the most recent event timestamp (freshness)", - "$ref" : "#/components/schemas/TableDataFreshnessCheckSpec", - "originalRef" : "#/components/schemas/TableDataFreshnessCheckSpec" + "originalRef" : "#/components/schemas/TableDataFreshnessCheckSpec", + "$ref" : "#/components/schemas/TableDataFreshnessCheckSpec" }, "daily_data_freshness_anomaly" : { "description" : "Verifies that the number of days since the most recent event timestamp (freshness) changes at a rate within a percentile boundary during the last 90 days.", - "$ref" : "#/components/schemas/TableDataFreshnessAnomalyCheckSpec", - "originalRef" : "#/components/schemas/TableDataFreshnessAnomalyCheckSpec" + "originalRef" : "#/components/schemas/TableDataFreshnessAnomalyCheckSpec", + "$ref" : "#/components/schemas/TableDataFreshnessAnomalyCheckSpec" }, "daily_data_staleness" : { "description" : "Daily monitoring calculating the time difference in days between the current date and the most recent data ingestion timestamp (staleness)", - "$ref" : "#/components/schemas/TableDataStalenessCheckSpec", - "originalRef" : "#/components/schemas/TableDataStalenessCheckSpec" + "originalRef" : "#/components/schemas/TableDataStalenessCheckSpec", + "$ref" : "#/components/schemas/TableDataStalenessCheckSpec" }, "daily_data_ingestion_delay" : { "description" : "Daily monitoring calculating the time difference in days between the most recent event timestamp and the most recent ingestion timestamp", - "$ref" : "#/components/schemas/TableDataIngestionDelayCheckSpec", - "originalRef" : "#/components/schemas/TableDataIngestionDelayCheckSpec" + "originalRef" : "#/components/schemas/TableDataIngestionDelayCheckSpec", + "$ref" : "#/components/schemas/TableDataIngestionDelayCheckSpec" } } } @@ -94415,19 +94446,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section.
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_data_ingestion_delay" : { "description" : "Daily partitioned check calculating the time difference in days between the most recent event timestamp and the most recent ingestion timestamp", - "$ref" : "#/components/schemas/TableDataIngestionDelayCheckSpec", - "originalRef" : "#/components/schemas/TableDataIngestionDelayCheckSpec" + "originalRef" : "#/components/schemas/TableDataIngestionDelayCheckSpec", + "$ref" : "#/components/schemas/TableDataIngestionDelayCheckSpec" }, "daily_partition_reload_lag" : { "description" : "Daily partitioned check calculating the longest time a row waited to be loaded, which is the maximum difference in days between the ingestion timestamp and the event timestamp column on any row in the monitored partition", - "$ref" : "#/components/schemas/TablePartitionReloadLagCheckSpec", - "originalRef" : "#/components/schemas/TablePartitionReloadLagCheckSpec" + "originalRef" : "#/components/schemas/TablePartitionReloadLagCheckSpec", + "$ref" : "#/components/schemas/TablePartitionReloadLagCheckSpec" } } } @@ -94545,24 +94576,24 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_data_freshness" : { "description" : "Monthly monitoring calculating the number of days since the most recent event timestamp (freshness)", - "$ref" : "#/components/schemas/TableDataFreshnessCheckSpec", - "originalRef" : "#/components/schemas/TableDataFreshnessCheckSpec" + "originalRef" : "#/components/schemas/TableDataFreshnessCheckSpec", + "$ref" : "#/components/schemas/TableDataFreshnessCheckSpec" }, "monthly_data_staleness" : { "description" : "Monthly monitoring calculating the time difference in days between the current date and the most recent data ingestion timestamp (staleness)", - "$ref" : "#/components/schemas/TableDataStalenessCheckSpec", - "originalRef" : "#/components/schemas/TableDataStalenessCheckSpec" + "originalRef" : "#/components/schemas/TableDataStalenessCheckSpec", + "$ref" : "#/components/schemas/TableDataStalenessCheckSpec" }, "monthly_data_ingestion_delay" : { "description" : "Monthly monitoring calculating the time difference in days between the most recent event timestamp and the most recent ingestion timestamp", - "$ref" : "#/components/schemas/TableDataIngestionDelayCheckSpec", - "originalRef" : "#/components/schemas/TableDataIngestionDelayCheckSpec" + "originalRef" : "#/components/schemas/TableDataIngestionDelayCheckSpec", + "$ref" : "#/components/schemas/TableDataIngestionDelayCheckSpec" } } } @@ -94606,19 +94637,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section.
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_data_ingestion_delay" : { "description" : "Monthly partitioned check calculating the time difference in days between the most recent event timestamp and the most recent ingestion timestamp", - "$ref" : "#/components/schemas/TableDataIngestionDelayCheckSpec", - "originalRef" : "#/components/schemas/TableDataIngestionDelayCheckSpec" + "originalRef" : "#/components/schemas/TableDataIngestionDelayCheckSpec", + "$ref" : "#/components/schemas/TableDataIngestionDelayCheckSpec" }, "monthly_partition_reload_lag" : { "description" : "Monthly partitioned check calculating the longest time a row waited to be loaded, which is the maximum difference in days between the ingestion timestamp and the event timestamp column on any row in the monitored partition", - "$ref" : "#/components/schemas/TablePartitionReloadLagCheckSpec", - "originalRef" : "#/components/schemas/TablePartitionReloadLagCheckSpec" + "originalRef" : "#/components/schemas/TablePartitionReloadLagCheckSpec", + "$ref" : "#/components/schemas/TablePartitionReloadLagCheckSpec" } } } @@ -94695,29 +94726,29 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_data_freshness" : { "description" : "Calculates the number of days since the most recent event timestamp (freshness)", - "$ref" : "#/components/schemas/TableDataFreshnessCheckSpec", - "originalRef" : "#/components/schemas/TableDataFreshnessCheckSpec" + "originalRef" : "#/components/schemas/TableDataFreshnessCheckSpec", + "$ref" : "#/components/schemas/TableDataFreshnessCheckSpec" }, "profile_data_freshness_anomaly" : { "description" : "Verifies that the number of days since the most recent event timestamp (freshness) changes at a rate within a percentile boundary during the last 90 days.", - "$ref" : "#/components/schemas/TableDataFreshnessAnomalyCheckSpec", - "originalRef" : "#/components/schemas/TableDataFreshnessAnomalyCheckSpec" + "originalRef" : "#/components/schemas/TableDataFreshnessAnomalyCheckSpec", + "$ref" : "#/components/schemas/TableDataFreshnessAnomalyCheckSpec" }, "profile_data_staleness" : { "description" : "Calculates the time difference in days between the current date and the most recent data ingestion timestamp (staleness)", - "$ref" : "#/components/schemas/TableDataStalenessCheckSpec", - "originalRef" : "#/components/schemas/TableDataStalenessCheckSpec" + "originalRef" : "#/components/schemas/TableDataStalenessCheckSpec", + "$ref" : "#/components/schemas/TableDataStalenessCheckSpec" }, "profile_data_ingestion_delay" : { "description" : "Calculates the time difference in days between the most recent event timestamp and the most recent ingestion timestamp", - "$ref" : "#/components/schemas/TableDataIngestionDelayCheckSpec", - "originalRef" :
"#/components/schemas/TableDataIngestionDelayCheckSpec" + "originalRef" : "#/components/schemas/TableDataIngestionDelayCheckSpec", + "$ref" : "#/components/schemas/TableDataIngestionDelayCheckSpec" } } } @@ -94761,19 +94792,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_duplicate_record_count" : { "description" : "Verifies that the number of duplicate record values in a table does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/TableDuplicateRecordCountCheckSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordCountCheckSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordCountCheckSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordCountCheckSpec" }, "daily_duplicate_record_percent" : { "description" : "Verifies that the percentage of duplicate record values in a table does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec" } } } @@ -94817,19 +94848,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_duplicate_record_count" : { "description" : "Verifies that the number of duplicate record values in a table does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/TableDuplicateRecordCountCheckSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordCountCheckSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordCountCheckSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordCountCheckSpec" }, "daily_partition_duplicate_record_percent" : { "description" : "Verifies that the percentage of duplicate record values in a table does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec" } } } @@ -94873,19 +94904,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_duplicate_record_count" : { "description" : "Verifies that the number of duplicate record values in a table does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/TableDuplicateRecordCountCheckSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordCountCheckSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordCountCheckSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordCountCheckSpec" }, "monthly_duplicate_record_percent" : { "description" : "Verifies that the percentage of duplicate record values in a table does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec" } } } @@ -94929,19 +94960,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_duplicate_record_count" : { "description" : "Verifies that the number of duplicate record values in a table does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/TableDuplicateRecordCountCheckSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordCountCheckSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordCountCheckSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordCountCheckSpec" }, "monthly_partition_duplicate_record_percent" : { "description" : "Verifies that the percentage of duplicate record values in a table does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec" } } } @@ -94985,19 +95016,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. 
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_duplicate_record_count" : { "description" : "Verifies that the number of duplicate record values in a table does not exceed the maximum accepted count.", - "$ref" : "#/components/schemas/TableDuplicateRecordCountCheckSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordCountCheckSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordCountCheckSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordCountCheckSpec" }, "profile_duplicate_record_percent" : { "description" : "Verifies that the percentage of duplicate record values in a table does not exceed the maximum accepted percentage.", - "$ref" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec", - "originalRef" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec" + "originalRef" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec", + "$ref" : "#/components/schemas/TableDuplicateRecordPercentCheckSpec" } } } @@ -95061,39 +95092,39 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_row_count" : { "description" : "Verifies that the tested table has at least a minimum accepted number of rows. The default configuration of the warning, error and fatal severity rules verifies a minimum row count of one row, which ensures that the table is not empty. Stores the most recent captured row count value for each day when the row count was evaluated.", - "$ref" : "#/components/schemas/TableRowCountCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountCheckSpec", + "$ref" : "#/components/schemas/TableRowCountCheckSpec" }, "daily_row_count_anomaly" : { "description" : "Detects when the row count has changed too much since the previous day. 
It uses time series anomaly detection to find the biggest volume changes during the last 90 days.", - "$ref" : "#/components/schemas/TableRowCountAnomalyDifferencingCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountAnomalyDifferencingCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountAnomalyDifferencingCheckSpec", + "$ref" : "#/components/schemas/TableRowCountAnomalyDifferencingCheckSpec" }, "daily_row_count_change" : { "description" : "Detects when the volume's (row count) change since the last known row count exceeds the maximum accepted change percentage.", - "$ref" : "#/components/schemas/TableRowCountChangeCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChangeCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChangeCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChangeCheckSpec" }, "daily_row_count_change_1_day" : { "description" : "Detects when the volume's change (increase or decrease of the row count) since the previous day exceeds the maximum accepted change percentage. ", - "$ref" : "#/components/schemas/TableRowCountChange1DayCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChange1DayCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChange1DayCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChange1DayCheckSpec" }, "daily_row_count_change_7_days" : { "description" : "This check verifies that the percentage of change in the table's volume (row count) since seven days ago is below the maximum accepted percentage. Verifying a volume change since a value a week ago overcomes the effect of weekly seasonality.", - "$ref" : "#/components/schemas/TableRowCountChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChange7DaysCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChange7DaysCheckSpec" }, "daily_row_count_change_30_days" : { "description" : "This check verifies that the percentage of change in the table's volume (row count) since thirty days ago is below the maximum accepted percentage. Comparing the current row count to a value 30 days ago overcomes the effect of monthly seasonality.", - "$ref" : "#/components/schemas/TableRowCountChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChange30DaysCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChange30DaysCheckSpec" } } } @@ -95157,39 +95188,39 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "daily_partition_row_count" : { "description" : "Verifies that each daily partition in the tested table has at least a minimum accepted number of rows.
The default configuration of the warning, error and fatal severity rules verifies a minimum row count of one row, which ensures that the partition is not empty.", - "$ref" : "#/components/schemas/TableRowCountCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountCheckSpec", + "$ref" : "#/components/schemas/TableRowCountCheckSpec" }, "daily_partition_row_count_anomaly" : { "description" : "Detects outstanding partitions whose volume (the row count) differs too much from the average daily partition size. It uses time series anomaly detection to find the outliers in the partition volume during the last 90 days.", - "$ref" : "#/components/schemas/TableRowCountAnomalyStationaryPartitionCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountAnomalyStationaryPartitionCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountAnomalyStationaryPartitionCheckSpec", + "$ref" : "#/components/schemas/TableRowCountAnomalyStationaryPartitionCheckSpec" }, "daily_partition_row_count_change" : { "description" : "Detects when the partition's volume (row count) change between the current daily partition and the previous partition exceeds the maximum accepted change percentage.", - "$ref" : "#/components/schemas/TableRowCountChangeCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChangeCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChangeCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChangeCheckSpec" }, "daily_partition_row_count_change_1_day" : { "description" : "Detects when the partition volume change (increase or decrease of the row count) since yesterday's daily partition exceeds the maximum accepted change percentage. ", - "$ref" : "#/components/schemas/TableRowCountChange1DayCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChange1DayCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChange1DayCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChange1DayCheckSpec" }, "daily_partition_row_count_change_7_days" : { "description" : "This check verifies that the percentage of change in the partition's volume (row count) since seven days ago is below the maximum accepted percentage. Verifying a volume change since a value a week ago overcomes the effect of weekly seasonality.", - "$ref" : "#/components/schemas/TableRowCountChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChange7DaysCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChange7DaysCheckSpec" }, "daily_partition_row_count_change_30_days" : { "description" : "This check verifies that the percentage of change in the partition's volume (row count) since thirty days ago is below the maximum accepted percentage. Comparing the current row count to a value 30 days ago overcomes the effect of monthly seasonality.", - "$ref" : "#/components/schemas/TableRowCountChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChange30DaysCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChange30DaysCheckSpec" } } } @@ -95233,19 +95264,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section.
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_row_count" : { "description" : "Verifies that the tested table has at least a minimum accepted number of rows. The default configuration of the warning, error and fatal severity rules verifies a minimum row count of one row, which ensures that the table is not empty. Stores the most recent captured row count value for each month when the row count was evaluated.", - "$ref" : "#/components/schemas/TableRowCountCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountCheckSpec", + "$ref" : "#/components/schemas/TableRowCountCheckSpec" }, "monthly_row_count_change" : { "description" : "Detects when the volume (row count) change since the last known row count from a previous month exceeds the maximum accepted change percentage.", - "$ref" : "#/components/schemas/TableRowCountChangeCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChangeCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChangeCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChangeCheckSpec" } } } @@ -95289,19 +95320,19 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section. The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "monthly_partition_row_count" : { "description" : "Verifies that each monthly partition in the tested table has at least a minimum accepted number of rows. The default configuration of the warning, error and fatal severity rules verifies a minimum row count of one row, which ensures that the partition is not empty.", - "$ref" : "#/components/schemas/TableRowCountCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountCheckSpec", + "$ref" : "#/components/schemas/TableRowCountCheckSpec" }, "monthly_partition_row_count_change" : { "description" : "Detects when the partition's volume (row count) change between the current monthly partition and the previous partition exceeds the maximum accepted change percentage.", - "$ref" : "#/components/schemas/TableRowCountChangeCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChangeCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChangeCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChangeCheckSpec" } } } @@ -95365,39 +95396,39 @@ "type" : "object", "description" : "Dictionary of additional custom checks within this category. The keys are check names defined in the definition section.
The sensor parameters and rules should match the type of the configured sensor and rule for the custom check.", "additionalProperties" : { - "$ref" : "#/definitions/CustomCheckSpec", - "originalRef" : "#/definitions/CustomCheckSpec" + "originalRef" : "#/definitions/CustomCheckSpec", + "$ref" : "#/definitions/CustomCheckSpec" } }, "profile_row_count" : { "description" : "Verifies that the tested table has at least a minimum accepted number of rows. The default configuration of the warning, error and fatal severity rules verifies a minimum row count of one row, which ensures that the table is not empty.", - "$ref" : "#/components/schemas/TableRowCountCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountCheckSpec", + "$ref" : "#/components/schemas/TableRowCountCheckSpec" }, "profile_row_count_anomaly" : { "description" : "Detects when the row count has changed too much since the previous day. It uses time series anomaly detection to find the biggest volume changes during the last 90 days.", - "$ref" : "#/components/schemas/TableRowCountAnomalyDifferencingCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountAnomalyDifferencingCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountAnomalyDifferencingCheckSpec", + "$ref" : "#/components/schemas/TableRowCountAnomalyDifferencingCheckSpec" }, "profile_row_count_change" : { "description" : "Detects when the volume's (row count) change since the last known row count exceeds the maximum accepted change percentage.", - "$ref" : "#/components/schemas/TableRowCountChangeCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChangeCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChangeCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChangeCheckSpec" }, "profile_row_count_change_1_day" : { "description" : "Detects when the volume's change (increase or decrease of the row count) since the previous day exceeds the maximum accepted change percentage.", - "$ref" : "#/components/schemas/TableRowCountChange1DayCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChange1DayCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChange1DayCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChange1DayCheckSpec" }, "profile_row_count_change_7_days" : { "description" : "This check verifies that the percentage of change in the table's volume (row count) since seven days ago is below the maximum accepted percentage. Verifying a volume change since a value a week ago overcomes the effect of weekly seasonality. ", - "$ref" : "#/components/schemas/TableRowCountChange7DaysCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChange7DaysCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChange7DaysCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChange7DaysCheckSpec" }, "profile_row_count_change_30_days" : { "description" : "This check verifies that the percentage of change in the table's volume (row count) since thirty days ago is below the maximum accepted percentage.
Comparing the current row count to a value 30 days ago overcomes the effect of monthly seasonality.", - "$ref" : "#/components/schemas/TableRowCountChange30DaysCheckSpec", - "originalRef" : "#/components/schemas/TableRowCountChange30DaysCheckSpec" + "originalRef" : "#/components/schemas/TableRowCountChange30DaysCheckSpec", + "$ref" : "#/components/schemas/TableRowCountChange30DaysCheckSpec" } } } @@ -95452,8 +95483,8 @@ }, "parameters" : { "description" : "Profiler parameters", - "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", - "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" + "originalRef" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec", + "$ref" : "#/components/schemas/TableVolumeRowCountSensorParametersSpec" } } } @@ -95474,8 +95505,8 @@ "properties" : { "row_count" : { "description" : "Configuration of the row count profiler.", - "$ref" : "#/components/schemas/TableVolumeRowCountStatisticsCollectorSpec", - "originalRef" : "#/components/schemas/TableVolumeRowCountStatisticsCollectorSpec" + "originalRef" : "#/components/schemas/TableVolumeRowCountStatisticsCollectorSpec", + "$ref" : "#/components/schemas/TableVolumeRowCountStatisticsCollectorSpec" } } } @@ -95710,8 +95741,8 @@ "type" : "boolean" }, "duration" : { - "$ref" : "#/components/schemas/Duration", - "originalRef" : "#/components/schemas/Duration" + "originalRef" : "#/components/schemas/Duration", + "$ref" : "#/components/schemas/Duration" }, "durationEstimated" : { "type" : "boolean" @@ -95719,6 +95750,95 @@ } } }, + "TeradataParametersSpec" : { + "type" : "object", + "properties" : { + "host" : { + "type" : "string", + "description" : "Teradata host name. Also supports a ${TERADATA_HOST} configuration with a custom environment variable.", + "extensions" : { }, + "exampleSetFlag" : false, + "types" : [ "string" ] + }, + "port" : { + "type" : "string", + "description" : "Teradata port number. The default port is 1025. Also supports a ${TERADATA_PORT} configuration with a custom environment variable.", + "extensions" : { }, + "exampleSetFlag" : false, + "types" : [ "string" ] + }, + "user" : { + "type" : "string", + "description" : "Teradata user name. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.", + "extensions" : { }, + "exampleSetFlag" : false, + "types" : [ "string" ] + }, + "password" : { + "type" : "string", + "description" : "Teradata database password. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution.", + "extensions" : { }, + "exampleSetFlag" : false, + "types" : [ "string" ] + }, + "properties" : { + "type" : "object", + "additionalProperties" : { + "type" : "string", + "extensions" : { }, + "exampleSetFlag" : false, + "types" : [ "string" ] + }, + "description" : "A dictionary of custom JDBC parameters that are added to the JDBC connection string, a key/value dictionary.", + "extensions" : { }, + "exampleSetFlag" : false, + "types" : [ "object" ], + "jsonSchema" : { + "type" : "object", + "description" : "A dictionary of custom JDBC parameters that are added to the JDBC connection string, a key/value dictionary." + } + }, + "database" : { + "type" : "string", + "extensions" : { }, + "exampleSetFlag" : false, + "types" : [ "string" ] + } + }, + "exampleSetFlag" : false, + "types" : [ "object" ], + "jsonSchema" : { + "type" : "object", + "properties" : { + "host" : { + "type" : "string", + "description" : "Teradata host name.
Also supports a ${TERADATA_HOST} configuration with a custom environment variable." }, + "port" : { + "type" : "string", + "description" : "Teradata port number. The default port is 1025. Also supports a ${TERADATA_PORT} configuration with a custom environment variable." }, + "user" : { + "type" : "string", + "description" : "Teradata user name. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution." }, + "password" : { + "type" : "string", + "description" : "Teradata database password. The value can be in the ${ENVIRONMENT_VARIABLE_NAME} format to use dynamic substitution." }, + "properties" : { + "type" : "object", + "description" : "A dictionary of custom JDBC parameters that are added to the JDBC connection string, a key/value dictionary.", + "additionalProperties" : { + "type" : "string" + } }, + "database" : { + "type" : "string" + } } } }, "TimeWindowFilterParameters" : { "type" : "object", "properties" : { @@ -95980,20 +96100,20 @@ "additionalProperties" : { "type" : "array", "items" : { - "$ref" : "#/definitions/IncidentModel", - "originalRef" : "#/definitions/IncidentModel" + "originalRef" : "#/definitions/IncidentModel", + "$ref" : "#/definitions/IncidentModel" } } }, "openIncidentSeverityLevelCounts" : { "description" : "Incident severity level count container for the incident with open status.", - "$ref" : "#/components/schemas/IncidentSeverityLevelCountsModel", - "originalRef" : "#/components/schemas/IncidentSeverityLevelCountsModel" + "originalRef" : "#/components/schemas/IncidentSeverityLevelCountsModel", + "$ref" : "#/components/schemas/IncidentSeverityLevelCountsModel" }, "acknowledgedIncidentSeverityLevelCounts" : { "description" : "Incident severity level count container for the incident with acknowledged status.", - "$ref" : "#/components/schemas/IncidentSeverityLevelCountsModel", - "originalRef" : "#/components/schemas/IncidentSeverityLevelCountsModel" + "originalRef" : "#/components/schemas/IncidentSeverityLevelCountsModel", + "$ref" : "#/components/schemas/IncidentSeverityLevelCountsModel" } } } @@ -96300,7 +96420,7 @@ "ProviderType" : { "type" : "string", "exampleSetFlag" : false, - "enum" : [ "bigquery", "databricks", "mysql", "oracle", "postgresql", "duckdb", "presto", "redshift", "snowflake", "spark", "sqlserver", "trino", "hana", "db2", "mariadb", "clickhouse", "questdb" ] + "enum" : [ "bigquery", "clickhouse", "databricks", "db2", "duckdb", "hana", "mariadb", "mysql", "oracle", "postgresql", "presto", "questdb", "redshift", "snowflake", "spark", "sqlserver", "teradata", "trino" ] }, "ConnectionTestStatus" : { "type" : "string", @@ -96410,7 +96530,7 @@ "DisplayHint" : { "type" : "string", "exampleSetFlag" : false, - "enum" : [ "textarea", "column_names" ] + "enum" : [ "textarea", "column_names", "requires_paid_version" ] }, "PostgresqlSslMode" : { "type" : "string",
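For orientation, the sketch below shows how the new TeradataParametersSpec could appear in a DQOps connection definition. It is illustrative only: the field names (host, port, user, password, properties, database) and the teradata provider type come from the schema and ProviderType enum added above, while the surrounding YAML layout (apiVersion/kind/spec, the teradata node name, the file path) and the sample CHARSET JDBC property are assumptions based on how other connectors are documented, not something this patch specifies.

```yaml
# sources/<connection_name>/connection.dqoconnection.yaml -- hypothetical example
apiVersion: dqo/v1
kind: source
spec:
  provider_type: teradata          # new ProviderType enum value added by this patch
  teradata:
    host: ${TERADATA_HOST}         # resolved from an environment variable
    port: "1025"                   # default Teradata port per the schema description
    user: ${TERADATA_USER}         # ${ENVIRONMENT_VARIABLE_NAME} substitution is supported
    password: ${TERADATA_PASSWORD}
    database: analytics            # optional; the schema defines it without a description
    properties:                    # custom key/value JDBC parameters appended to the connection string
      CHARSET: UTF8                # assumed example of a Teradata JDBC driver property
```

Values written in the ${...} form are substituted dynamically, as the user and password descriptions above state, so credentials can stay out of the YAML file.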