Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

WIP - fix date part bug #1598

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,6 @@ def match(self, candidate_specs: Sequence[InstanceSpec]) -> Sequence[InstanceSpe
return candidate_specs

# If there are metrics in the query, use max metric default. For no-metric queries, use standard default.
# TODO: [custom granularity] allow custom granularities to be used as defaults if appropriate
default_granularity = ExpandedTimeGranularity.from_time_granularity(
self.max_metric_default_time_granularity or DEFAULT_TIME_GRANULARITY
)
Expand All @@ -68,11 +67,16 @@ def match(self, candidate_specs: Sequence[InstanceSpec]) -> Sequence[InstanceSpe

matched_metric_time_specs: Tuple[TimeDimensionSpec, ...] = ()
for spec_key, time_grains in spec_key_to_grains.items():
if default_granularity in time_grains:
matched_metric_time_specs += (spec_key_to_specs[spec_key][0].with_grain(default_granularity),)
else:
if (
# If date part was included in the request, don't filter here. Minimum granularity filter will be used instead.
# Granularity is essentially overridden by date part, so this is only important for internal resolution.
spec_key.source_spec.date_part is None
# If default_granularity is not in the available options, then time granularity was specified in the request
# and a default is not needed here. Pass all options through for this spec key.
and default_granularity in time_grains
):
matched_metric_time_specs += (spec_key_to_specs[spec_key][0].with_grain(default_granularity),)
else:
matched_metric_time_specs += spec_key_to_specs[spec_key]

matching_specs: Sequence[LinkableInstanceSpec] = (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,11 @@ def from_call_parameter_set(
ParameterSetField.DATE_PART,
]

if time_dimension_call_parameter_set.time_granularity_name is not None:
time_granularity = time_dimension_call_parameter_set.time_granularity_name
if time_dimension_call_parameter_set.date_part:
time_granularity = None

if time_granularity is not None:
fields_to_compare.append(ParameterSetField.TIME_GRANULARITY)

return TimeDimensionPattern(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -126,8 +126,10 @@ def choose_time_spine_source(
}

# Standard grains can be satisfied by any time spine with a base grain that's <= the standard grain.
# If date part was requested, granularity doesn't matter. For simplicity of reasoning, assume DAY as the required grain.
smallest_required_standard_grain = min(
spec.time_granularity.base_granularity for spec in required_time_spine_specs
TimeGranularity.DAY if spec.date_part is not None else spec.time_granularity.base_granularity
for spec in required_time_spine_specs
)
compatible_time_spines_for_standard_grains = {
grain: time_spine_source
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@
from dbt_semantic_interfaces.type_enums.date_part import DatePart
from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity
from metricflow_semantics.filters.time_constraint import TimeRangeConstraint
from metricflow_semantics.query.query_parser import MetricFlowQueryParser
from metricflow_semantics.specs.metric_spec import MetricSpec
from metricflow_semantics.specs.query_param_implementations import TimeDimensionParameter
from metricflow_semantics.specs.query_spec import MetricFlowQuerySpec
from metricflow_semantics.specs.time_dimension_spec import TimeDimensionSpec
from metricflow_semantics.test_helpers.config_helpers import MetricFlowTestConfiguration
Expand Down Expand Up @@ -465,3 +467,49 @@ def test_subdaily_granularity_overrides_metric_default_granularity( # noqa: D10
dataflow_plan_builder=dataflow_plan_builder,
query_spec=query_spec,
)


@pytest.mark.sql_engine_snapshot
def test_date_part_with_non_default_grain(
    request: FixtureRequest,
    mf_test_configuration: MetricFlowTestConfiguration,
    dataflow_plan_builder: DataflowPlanBuilder,
    dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter,
    sql_client: SqlClient,
    query_parser: MetricFlowQueryParser,
) -> None:
    """Check SQL rendering for a metric query that requests a date part on metric_time.

    Uses a metric whose default grain differs from the requested date part's
    implied grain, exercising the date-part resolution path end to end.
    """
    group_by_items = (TimeDimensionParameter(name="metric_time", date_part=DatePart.YEAR),)
    parse_result = query_parser.parse_and_validate_query(
        metric_names=["archived_users"],
        group_by=group_by_items,
    )

    render_and_check(
        request=request,
        mf_test_configuration=mf_test_configuration,
        dataflow_to_sql_converter=dataflow_to_sql_converter,
        sql_client=sql_client,
        dataflow_plan_builder=dataflow_plan_builder,
        query_spec=parse_result.query_spec,
    )


@pytest.mark.sql_engine_snapshot
def test_metric_time_date_part(
    request: FixtureRequest,
    mf_test_configuration: MetricFlowTestConfiguration,
    dataflow_plan_builder: DataflowPlanBuilder,
    dataflow_to_sql_converter: DataflowToSqlQueryPlanConverter,
    sql_client: SqlClient,
    query_parser: MetricFlowQueryParser,
) -> None:
    """Check SQL rendering for a no-metric query grouped by a date part of metric_time."""
    group_by_items = (TimeDimensionParameter(name="metric_time", date_part=DatePart.YEAR),)
    parse_result = query_parser.parse_and_validate_query(group_by=group_by_items)

    render_and_check(
        request=request,
        mf_test_configuration=mf_test_configuration,
        dataflow_to_sql_converter=dataflow_to_sql_converter,
        sql_client=sql_client,
        dataflow_plan_builder=dataflow_plan_builder,
        query_spec=parse_result.query_spec,
    )
Loading
Loading