Commit
Merge pull request #951 from gooddata/snapshot-master-ff4c2de7-to-rel/dev

[bot] Merge master/ff4c2de7 into rel/dev
yenkins-admin authored Jan 16, 2025
2 parents dc33e80 + ff4c2de commit 32c4d47
Showing 11 changed files with 20 additions and 29 deletions.
.pre-commit-config.yaml (2 changes: 1 addition & 1 deletion)
@@ -15,7 +15,7 @@ repos:
args: [ '--maxkb=890' ]
- id: check-case-conflict
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.6
rev: v0.9.1
hooks:
# Run the linter.
- id: ruff
fmt-requirements.txt (2 changes: 1 addition & 1 deletion)
@@ -1 +1 @@
ruff==0.8.6
ruff==0.9.1
gooddata-fdw/gooddata_fdw/filter.py (3 changes: 1 addition & 2 deletions)
@@ -115,8 +115,7 @@ def extract_filters_from_quals(quals: list[Qual], table_columns: dict[str, Colum
new_filter = _qual_to_attribute_filter(filter_entity, qual)
else:
_log_info(
f"extract_filters_from_quals: field_name={qual.field_name} is not attribute, "
f"but {type(filter_entity)}"
f"extract_filters_from_quals: field_name={qual.field_name} is not attribute, but {type(filter_entity)}"
)
if new_filter:
filters.append(new_filter)
@@ -65,7 +65,7 @@ def _extract_invocation_payload(
payload = orjson.loads(descriptor.command)
except Exception:
raise ErrorInfo.bad_argument(
"Incorrect FlexConnect function invocation. The invocation payload is " "not a valid JSON."
"Incorrect FlexConnect function invocation. The invocation payload is not a valid JSON."
)

fun = payload.get("functionName")
@@ -55,7 +55,7 @@ def _check_function(self, fun: type[FlexConnectFunction]) -> str:

if fun.Schema is None:
raise ValueError(
f"FlexConnect function '{fun.Name}' implemented in class {fun.__name__} d" f"oes not specify schema."
f"FlexConnect function '{fun.Name}' implemented in class {fun.__name__} does not specify schema."
)

return fun.Name
@@ -30,12 +30,10 @@
P = ParamSpec("P")


def rpc_decorator() -> (
Callable[
[Callable[Concatenate[Any, pyarrow.flight.ServerCallContext, P], T]],
Callable[Concatenate[Any, pyarrow.flight.ServerCallContext, P], T],
]
):
def rpc_decorator() -> Callable[
[Callable[Concatenate[Any, pyarrow.flight.ServerCallContext, P], T]],
Callable[Concatenate[Any, pyarrow.flight.ServerCallContext, P], T],
]:
def _factory(
fun: Callable[Concatenate[Any, pyarrow.flight.ServerCallContext, P], T],
) -> Callable[Concatenate[Any, pyarrow.flight.ServerCallContext, P], T]:
@@ -264,9 +264,7 @@ def _shutdown_server(self) -> None:
def wait_for_stop(self, timeout: Optional[float] = None) -> bool:
if self._flight_shutdown_thread is None:
# this is really some mess in the caller code.. did not call stop() but tries to wait for it..
raise AssertionError(
"Flight server stop() was not issued yet attempting to wait for " "the server to stop."
)
raise AssertionError("Flight server stop() was not issued yet attempting to wait for the server to stop.")

if self._flight_shutdown_thread.is_alive():
self._flight_shutdown_thread.join(timeout=timeout)
gooddata-sdk/gooddata_sdk/catalog/export/service.py (6 changes: 3 additions & 3 deletions)
@@ -78,9 +78,9 @@ def _get_exported_content(
Raises:
ValueError: If the server is not able to return a response or if the input values are invalid.
"""
assert (
timeout > 0 and retry > 0 and max_retry > 0
), f"Timeout value '{timeout}' or retry value '{retry}' or max retry value '{max_retry}' is negative."
assert timeout > 0 and retry > 0 and max_retry > 0, (
f"Timeout value '{timeout}' or retry value '{retry}' or max retry value '{max_retry}' is negative."
)
assert timeout > retry, f"Retry value {retry} cannot be higher than timeout value {timeout}"
assert retry <= max_retry, f"Retry value {retry} must be smaller or the same as max retry value {max_retry}"
response = get_func(workspace_id=workspace_id, export_id=export_id, _preload_content=False)
gooddata-sdk/gooddata_sdk/catalog/organization/service.py (2 changes: 1 addition & 1 deletion)
@@ -404,7 +404,7 @@ def update_identity_provider(self, identity_provider: CatalogIdentityProvider) -
self._entities_api.update_entity_identity_providers(identity_provider.id, identity_provider_document)
except NotFoundException:
raise ValueError(
f"Can not update {identity_provider.id} identity provider. " f"This identity provider does not exist."
f"Can not update {identity_provider.id} identity provider. This identity provider does not exist."
)

# Layout APIs
gooddata-sdk/gooddata_sdk/compute/model/filter.py (12 changes: 4 additions & 8 deletions)
@@ -157,8 +157,7 @@ def __init__(self, dataset: ObjId, granularity: str, from_shift: int, to_shift:

if granularity not in _GRANULARITY:
raise ValueError(
f"Invalid relative date filter granularity '{granularity}'."
f"It is expected to be one of: {_GRANULARITY}"
f"Invalid relative date filter granularity '{granularity}'. It is expected to be one of: {_GRANULARITY}"
)

self._dataset = dataset
@@ -373,8 +372,7 @@ def __init__(
else:
if not isinstance(values, (int, float)) and len(values) != 1:
raise ValueError(
f"Invalid number of values for {operator}. "
f"Expected single int, float or one-sized list or tuple."
f"Invalid number of values for {operator}. Expected single int, float or one-sized list or tuple."
)
# Convert int to float as AFM model filters accept float values
self._values = (float(values),) if isinstance(values, (int, float)) else values
@@ -456,8 +454,7 @@ def __init__(

if operator not in _RANKING_OPERATORS:
raise ValueError(
f"Invalid ranking filter operator type '{operator}'."
f"It is expected to be one of: {_RANKING_OPERATORS}"
f"Invalid ranking filter operator type '{operator}'. It is expected to be one of: {_RANKING_OPERATORS}"
)

self._metrics = [_extract_id_or_local_id(m) for m in metrics]
@@ -504,6 +501,5 @@ def description(self, labels: dict[str, str], format_locale: Optional[str] = Non
)
metric_ids = [m.id if isinstance(m, ObjId) else m for m in self.metrics]
return (
f"{self.operator.capitalize()} {self.value}{dimensionality_str} "
f"{labels.get(metric_ids[0], metric_ids[0])}"
f"{self.operator.capitalize()} {self.value}{dimensionality_str} {labels.get(metric_ids[0], metric_ids[0])}"
)
tox-requirements.txt (4 changes: 2 additions & 2 deletions)
@@ -1,2 +1,2 @@
tox~=4.20.0
tox-uv~=1.12.0
tox~=4.23.2
tox-uv~=1.17.0
