diff --git a/fakesnow/arrow.py b/fakesnow/arrow.py
index 696cf3b..bdca005 100644
--- a/fakesnow/arrow.py
+++ b/fakesnow/arrow.py
@@ -5,7 +5,7 @@
 import pyarrow as pa
 import pyarrow.compute as pc
 
-from fakesnow.types import ColumnInfo
+from fakesnow.rowtype import ColumnInfo
 
 
 def to_sf_schema(schema: pa.Schema, rowtype: list[ColumnInfo]) -> pa.Schema:
diff --git a/fakesnow/conn.py b/fakesnow/conn.py
index 199cc74..a96ac22 100644
--- a/fakesnow/conn.py
+++ b/fakesnow/conn.py
@@ -60,7 +60,7 @@ def __init__(
                 where upper(catalog_name) = '{self.database}'"""
             ).fetchone()
         ):
-            db_file = f"{self.db_path/self.database}.db" if self.db_path else ":memory:"
+            db_file = f"{self.db_path / self.database}.db" if self.db_path else ":memory:"
             duck_conn.execute(f"ATTACH DATABASE '{db_file}' AS {self.database}")
             duck_conn.execute(info_schema.creation_sql(self.database))
             duck_conn.execute(macros.creation_sql(self.database))
diff --git a/fakesnow/cursor.py b/fakesnow/cursor.py
index c7951b9..d4661e8 100644
--- a/fakesnow/cursor.py
+++ b/fakesnow/cursor.py
@@ -24,7 +24,7 @@
 import fakesnow.expr as expr
 import fakesnow.info_schema as info_schema
 import fakesnow.transforms as transforms
-from fakesnow.types import describe_as_result_metadata
+from fakesnow.rowtype import describe_as_result_metadata
 
 if TYPE_CHECKING:
     # don't require pandas at import time
diff --git a/fakesnow/info_schema.py b/fakesnow/info_schema.py
index 4f3bf73..d82a31a 100644
--- a/fakesnow/info_schema.py
+++ b/fakesnow/info_schema.py
@@ -146,7 +146,7 @@ def insert_table_comment_sql(catalog: str, schema: str, table: str, comment: str
 
 def insert_text_lengths_sql(catalog: str, schema: str, table: str, text_lengths: list[tuple[str, int]]) -> str:
     values = ", ".join(
-        f"('{catalog}', '{schema}', '{table}', '{col_name}', {size}, {min(size*4,16777216)})"
+        f"('{catalog}', '{schema}', '{table}', '{col_name}', {size}, {min(size * 4, 16777216)})"
         for (col_name, size) in text_lengths
     )
 
diff --git a/fakesnow/types.py b/fakesnow/rowtype.py
similarity index 100%
rename from fakesnow/types.py
rename to fakesnow/rowtype.py
diff --git a/fakesnow/server.py b/fakesnow/server.py
index 2be43bb..d40b8cf 100644
--- a/fakesnow/server.py
+++ b/fakesnow/server.py
@@ -17,7 +17,7 @@
 from fakesnow.arrow import to_ipc, to_sf
 from fakesnow.fakes import FakeSnowflakeConnection
 from fakesnow.instance import FakeSnow
-from fakesnow.types import describe_as_rowtype
+from fakesnow.rowtype import describe_as_rowtype
 
 shared_fs = FakeSnow()
 sessions: dict[str, FakeSnowflakeConnection] = {}
diff --git a/fakesnow/transforms.py b/fakesnow/transforms.py
index 17a894a..b952a50 100644
--- a/fakesnow/transforms.py
+++ b/fakesnow/transforms.py
@@ -133,7 +133,7 @@ def create_database(expression: exp.Expression, db_path: Path | None = None) ->
     ident = expression.find(exp.Identifier)
     assert ident, f"No identifier in {expression.sql}"
     db_name = ident.this
-    db_file = f"{db_path/db_name}.db" if db_path else ":memory:"
+    db_file = f"{db_path / db_name}.db" if db_path else ":memory:"
 
     if_not_exists = "IF NOT EXISTS " if expression.args.get("exists") else ""
 
diff --git a/fakesnow/transforms_merge.py b/fakesnow/transforms_merge.py
index abb7020..ca15798 100644
--- a/fakesnow/transforms_merge.py
+++ b/fakesnow/transforms_merge.py
@@ -83,9 +83,9 @@ def _create_merge_candidates(merge_expr: exp.Merge) -> exp.Expression:
     sql = f"""
     CREATE OR REPLACE TEMPORARY TABLE merge_candidates AS
     SELECT
-        {', '.join(sorted(values))},
+        {", ".join(sorted(values))},
         CASE
-            {' '.join(case_when_clauses)}
+            {" ".join(case_when_clauses)}
             ELSE NULL
         END AS MERGE_OP
     FROM {target_tbl}
@@ -191,12 +191,12 @@ def _counts(merge_expr: exp.Merge) -> exp.Expression:
             operations["inserted"].append(w_idx)
 
     count_statements = [
-        f"""COUNT_IF(merge_op in ({','.join(map(str, indices))})) as \"number of rows {op}\""""
+        f"""COUNT_IF(merge_op in ({",".join(map(str, indices))})) as \"number of rows {op}\""""
         for op, indices in operations.items()
         if indices
     ]
     sql = f"""
-    SELECT {', '.join(count_statements)}
+    SELECT {", ".join(count_statements)}
     FROM merge_candidates
     """
 
diff --git a/pyproject.toml b/pyproject.toml
index a206414..0deff54 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,7 +32,7 @@ dev = [
     "pyarrow-stubs==10.0.1.9",
     "pytest~=8.0",
     "pytest-asyncio",
-    "ruff~=0.8.1",
+    "ruff~=0.9.4",
     "twine~=6.0",
     "snowflake-sqlalchemy~=1.7.0",
 ]
diff --git a/tests/test_arrow.py b/tests/test_arrow.py
index 7ae28b9..bddd5c9 100644
--- a/tests/test_arrow.py
+++ b/tests/test_arrow.py
@@ -8,7 +8,7 @@
 import pytz
 
 from fakesnow.arrow import timestamp_to_sf_struct, to_ipc, to_sf_schema
-from fakesnow.types import ColumnInfo, describe_as_rowtype
+from fakesnow.rowtype import ColumnInfo, describe_as_rowtype
 
 
 def rowtype(types: list[str]) -> list[ColumnInfo]:
diff --git a/tests/test_patch.py b/tests/test_patch.py
index 3e8c493..5f22ac4 100644
--- a/tests/test_patch.py
+++ b/tests/test_patch.py
@@ -14,9 +14,9 @@ def test_patch_snowflake_connector_connect(_fakesnow_no_auto_create: None) -> No
 
 
 def test_patch_snowflake_connector_pandas_tools_write_pandas(_fakesnow_no_auto_create: None) -> None:
-    assert isinstance(
-        snowflake.connector.pandas_tools.write_pandas, MagicMock
-    ), "snowflake.connector.pandas_tools.write_pandas is not mocked"
+    assert isinstance(snowflake.connector.pandas_tools.write_pandas, MagicMock), (
+        "snowflake.connector.pandas_tools.write_pandas is not mocked"
+    )
 
 
 def test_patch_this_modules_connect() -> None: