From 039004a6572d862e44ca51ece6f406dcd4a535e8 Mon Sep 17 00:00:00 2001 From: Dat Nguyen <103571964+il-dat@users.noreply.github.com> Date: Tue, 3 Oct 2023 15:18:22 +0700 Subject: [PATCH 01/21] Update get_column_name_lists.sql (#393) --- macros/upload_results/get_column_name_lists.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/macros/upload_results/get_column_name_lists.sql b/macros/upload_results/get_column_name_lists.sql index ea708467..7911e866 100644 --- a/macros/upload_results/get_column_name_lists.sql +++ b/macros/upload_results/get_column_name_lists.sql @@ -52,8 +52,8 @@ {% elif dataset == 'model_executions' %} ( - node_id, command_invocation_id, + node_id, run_started_at, was_full_refresh, thread_id, @@ -76,8 +76,8 @@ {% elif dataset == 'models' %} ( - node_id, command_invocation_id, + node_id, run_started_at, database, schema, From 5d2df3fe2a3877f693594d46bcb4f7bd126575b4 Mon Sep 17 00:00:00 2001 From: Gemma Down <52132406+glsdown@users.noreply.github.com> Date: Thu, 5 Oct 2023 12:41:09 +0100 Subject: [PATCH 02/21] Tidy up execution timings logic (#389) * Minor tidy up * Simplifying timing extract --- integration_test_project/example-env.sh | 1 + .../upload_seed_executions.sql | 72 ++++--------------- .../upload_snapshot_executions.sql | 72 ++++--------------- .../upload_test_executions.sql | 48 +++---------- 4 files changed, 33 insertions(+), 160 deletions(-) diff --git a/integration_test_project/example-env.sh b/integration_test_project/example-env.sh index 51450cf3..47cb0d67 100755 --- a/integration_test_project/example-env.sh +++ b/integration_test_project/example-env.sh @@ -18,6 +18,7 @@ export DBT_ENV_SPARK_DRIVER_PATH= # /Library/simba/spark/lib/libsparkodbc_sbu.dy export DBT_ENV_SPARK_ENDPOINT= # The endpoint ID from the Databricks HTTP path # dbt environment variables, change these +export DBT_VERSION="1_5_0" export DBT_CLOUD_PROJECT_ID= export DBT_CLOUD_JOB_ID= export DBT_CLOUD_RUN_ID= diff --git 
a/macros/upload_individual_datasets/upload_seed_executions.sql b/macros/upload_individual_datasets/upload_seed_executions.sql index ca947ac2..1ccbfe2a 100644 --- a/macros/upload_individual_datasets/upload_seed_executions.sql +++ b/macros/upload_individual_datasets/upload_seed_executions.sql @@ -38,26 +38,10 @@ '{{ model.thread_id }}', {# thread_id #} '{{ model.status }}', {# status #} - {% if model.timing != [] %} - {% for stage in model.timing if stage.name == "compile" %} - {% if loop.length == 0 %} - null, {# compile_started_at #} - {% else %} - '{{ stage.started_at }}', {# compile_started_at #} - {% endif %} - {% endfor %} - - {% for stage in model.timing if stage.name == "execute" %} - {% if loop.length == 0 %} - null, {# query_completed_at #} - {% else %} - '{{ stage.completed_at }}', {# query_completed_at #} - {% endif %} - {% endfor %} - {% else %} - null, {# compile_started_at #} - null, {# query_completed_at #} - {% endif %} + {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} + {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ model.execution_time }}, {# total_node_runtime #} null, -- rows_affected not available {# Only available in Snowflake #} @@ -95,26 +79,10 @@ '{{ model.thread_id }}', {# thread_id #} '{{ model.status }}', {# status #} - {% if model.timing != [] %} - {% for stage in model.timing if stage.name == "compile" %} - {% if loop.length == 0 %} - null, {# compile_started_at #} - {% else %} - '{{ stage.started_at }}', {# compile_started_at #} - {% endif %} - {% endfor %} - - {% for stage in model.timing if stage.name == "execute" %} - {% if loop.length == 0 %} - null, {# query_completed_at #} 
- {% else %} - '{{ stage.completed_at }}', {# query_completed_at #} - {% endif %} - {% endfor %} - {% else %} - null, {# compile_started_at #} - null, {# query_completed_at #} - {% endif %} + {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} + {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ model.execution_time }}, {# total_node_runtime #} null, -- rows_affected not available {# Databricks #} @@ -170,26 +138,10 @@ '{{ model.thread_id }}', {# thread_id #} '{{ model.status }}', {# status #} - {% if model.timing != [] %} - {% for stage in model.timing if stage.name == "compile" %} - {% if loop.length == 0 %} - null, {# compile_started_at #} - {% else %} - '{{ stage.started_at }}', {# compile_started_at #} - {% endif %} - {% endfor %} - - {% for stage in model.timing if stage.name == "execute" %} - {% if loop.length == 0 %} - null, {# query_completed_at #} - {% else %} - '{{ stage.completed_at }}', {# query_completed_at #} - {% endif %} - {% endfor %} - {% else %} - null, {# compile_started_at #} - null, {# query_completed_at #} - {% endif %} + {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} + {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ model.execution_time }}, {# total_node_runtime #} try_cast('{{ model.adapter_response.rows_affected }}' as int), {# 
rows_affected #} diff --git a/macros/upload_individual_datasets/upload_snapshot_executions.sql b/macros/upload_individual_datasets/upload_snapshot_executions.sql index 369348cd..2006b168 100644 --- a/macros/upload_individual_datasets/upload_snapshot_executions.sql +++ b/macros/upload_individual_datasets/upload_snapshot_executions.sql @@ -38,26 +38,10 @@ '{{ model.thread_id }}', {# thread_id #} '{{ model.status }}', {# status #} - {% if model.timing != [] %} - {% for stage in model.timing if stage.name == "compile" %} - {% if loop.length == 0 %} - null, {# compile_started_at #} - {% else %} - '{{ stage.started_at }}', {# compile_started_at #} - {% endif %} - {% endfor %} - - {% for stage in model.timing if stage.name == "execute" %} - {% if loop.length == 0 %} - null, {# query_completed_at #} - {% else %} - '{{ stage.completed_at }}', {# query_completed_at #} - {% endif %} - {% endfor %} - {% else %} - null, {# compile_started_at #} - null, {# query_completed_at #} - {% endif %} + {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} + {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ model.execution_time }}, {# total_node_runtime #} null, -- rows_affected not available {# Only available in Snowflake #} @@ -95,26 +79,10 @@ '{{ model.thread_id }}', {# thread_id #} '{{ model.status }}', {# status #} - {% if model.timing != [] %} - {% for stage in model.timing if stage.name == "compile" %} - {% if loop.length == 0 %} - null, {# compile_started_at #} - {% else %} - '{{ stage.started_at }}', {# compile_started_at #} - {% endif %} - {% endfor %} - - {% for stage in model.timing if stage.name == "execute" %} - {% if 
loop.length == 0 %} - null, {# query_completed_at #} - {% else %} - '{{ stage.completed_at }}', {# query_completed_at #} - {% endif %} - {% endfor %} - {% else %} - null, {# compile_started_at #} - null, {# query_completed_at #} - {% endif %} + {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} + {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ model.execution_time }}, {# total_node_runtime #} null, -- rows_affected not available {# Databricks #} @@ -170,26 +138,10 @@ '{{ model.thread_id }}', {# thread_id #} '{{ model.status }}', {# status #} - {% if model.timing != [] %} - {% for stage in model.timing if stage.name == "compile" %} - {% if loop.length == 0 %} - null, {# compile_started_at #} - {% else %} - '{{ stage.started_at }}', {# compile_started_at #} - {% endif %} - {% endfor %} - - {% for stage in model.timing if stage.name == "execute" %} - {% if loop.length == 0 %} - null, {# query_completed_at #} - {% else %} - '{{ stage.completed_at }}', {# query_completed_at #} - {% endif %} - {% endfor %} - {% else %} - null, {# compile_started_at #} - null, {# query_completed_at #} - {% endif %} + {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} + {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ model.execution_time }}, {# total_node_runtime #} try_cast('{{ 
model.adapter_response.rows_affected }}' as int), {# rows_affected #} diff --git a/macros/upload_individual_datasets/upload_test_executions.sql b/macros/upload_individual_datasets/upload_test_executions.sql index a42b03ba..ea3553ae 100644 --- a/macros/upload_individual_datasets/upload_test_executions.sql +++ b/macros/upload_individual_datasets/upload_test_executions.sql @@ -35,26 +35,10 @@ '{{ test.thread_id }}', {# thread_id #} '{{ test.status }}', {# status #} - {% if test.timing != [] %} - {% for stage in test.timing if stage.name == "compile" %} - {% if loop.length == 0 %} - null, {# compile_started_at #} - {% else %} - '{{ stage.started_at }}', {# compile_started_at #} - {% endif %} - {% endfor %} - - {% for stage in test.timing if stage.name == "execute" %} - {% if loop.length == 0 %} - null, {# query_completed_at #} - {% else %} - '{{ stage.completed_at }}', {# query_completed_at #} - {% endif %} - {% endfor %} - {% else %} - null, {# compile_started_at #} - null, {# query_completed_at #} - {% endif %} + {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} + {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ test.execution_time }}, {# total_node_runtime #} null, {# rows_affected not available in Databricks #} @@ -89,26 +73,10 @@ '{{ test.thread_id }}', {# thread_id #} '{{ test.status }}', {# status #} - {% if test.timing != [] %} - {% for stage in test.timing if stage.name == "compile" %} - {% if loop.length == 0 %} - null, {# compile_started_at #} - {% else %} - '{{ stage.started_at }}', {# compile_started_at #} - {% endif %} - {% endfor %} - - {% for stage in test.timing if stage.name == "execute" %} - {% if 
loop.length == 0 %} - null, {# query_completed_at #} - {% else %} - '{{ stage.completed_at }}', {# query_completed_at #} - {% endif %} - {% endfor %} - {% else %} - null, {# compile_started_at #} - null, {# query_completed_at #} - {% endif %} + {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} + {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ test.execution_time }}, {# total_node_runtime #} null, {# rows_affected not available in Databricks #} From 165945f9564230a1417d9498936c2a2a8fc70bf7 Mon Sep 17 00:00:00 2001 From: Gemma Down <52132406+glsdown@users.noreply.github.com> Date: Thu, 5 Oct 2023 13:01:15 +0100 Subject: [PATCH 03/21] Update version numbers (#394) --- README.md | 2 +- dbt_project.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 17688961..6c537660 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ See the generated [dbt docs site](https://brooklyn-data.github.io/dbt_artifacts/ ``` packages: - package: brooklyn-data/dbt_artifacts - version: 2.6.0 + version: 2.6.1 ``` :construction_worker: Make sure to fix at least the **minor** version, to avoid issues when a new release is open. See the notes on upgrading below for more detail. 
diff --git a/dbt_project.yml b/dbt_project.yml index 4a031fa9..ee9e85ca 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -1,5 +1,5 @@ name: "dbt_artifacts" -version: "2.6.0" +version: "2.6.1" config-version: 2 require-dbt-version: [">=1.3.0", "<1.7.0"] profile: "dbt_artifacts" From deeec8bcfd6462dcbd5bbc2ea878e811dd0f3484 Mon Sep 17 00:00:00 2001 From: Gemma Down <52132406+glsdown@users.noreply.github.com> Date: Thu, 16 Nov 2023 12:34:05 +0000 Subject: [PATCH 04/21] Add support for 1.7 (#403) --- .github/workflows/ci_lint_package.yml | 2 +- .github/workflows/ci_test_package.yml | 4 +- .github/workflows/main_lint_package.yml | 2 +- .github/workflows/main_test_package.yml | 2 +- .github/workflows/publish_docs_on_release.yml | 2 +- dbt_project.yml | 2 +- tox.ini | 45 ++++++++++++++++--- 7 files changed, 46 insertions(+), 13 deletions(-) diff --git a/.github/workflows/ci_lint_package.yml b/.github/workflows/ci_lint_package.yml index 9f1508f3..2563e0cf 100644 --- a/.github/workflows/ci_lint_package.yml +++ b/.github/workflows/ci_lint_package.yml @@ -50,7 +50,7 @@ jobs: architecture: "x64" - name: Install Python packages - run: python -m pip install dbt-snowflake~=1.6.0 sqlfluff-templater-dbt~=2.3.2 + run: python -m pip install dbt-snowflake~=1.7.0 sqlfluff-templater-dbt~=2.3.2 - name: Test database connection run: dbt debug diff --git a/.github/workflows/ci_test_package.yml b/.github/workflows/ci_test_package.yml index b02997ee..034a316f 100644 --- a/.github/workflows/ci_test_package.yml +++ b/.github/workflows/ci_test_package.yml @@ -21,7 +21,7 @@ env: DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} DBT_ENV_SECRET_GCP_PROJECT: ${{ secrets.GCP_PROJECT }} # Env var to test version - LAST_RELEASE_SUPPORTED_DBT_VERSION: 1_6_0 # A dbt version supported by both the last release and this one + LAST_RELEASE_SUPPORTED_DBT_VERSION: 1_7_0 # A dbt version supported by both the last release and this one # Env vars to test invocations model 
DBT_CLOUD_PROJECT_ID: 123 DBT_CLOUD_JOB_ID: ABC @@ -115,7 +115,7 @@ jobs: matrix: warehouse: ["snowflake", "bigquery", "postgres"] # When supporting a new version, update the list here - version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0"] + version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0"] runs-on: ubuntu-latest environment: name: Approve Integration Tests diff --git a/.github/workflows/main_lint_package.yml b/.github/workflows/main_lint_package.yml index 7747f565..771d185f 100644 --- a/.github/workflows/main_lint_package.yml +++ b/.github/workflows/main_lint_package.yml @@ -46,7 +46,7 @@ jobs: architecture: "x64" - name: Install Python packages - run: python -m pip install dbt-snowflake~=1.6.0 sqlfluff-templater-dbt~=2.3.2 + run: python -m pip install dbt-snowflake~=1.7.0 sqlfluff-templater-dbt~=2.3.2 - name: Test database connection run: dbt debug diff --git a/.github/workflows/main_test_package.yml b/.github/workflows/main_test_package.yml index 4398c459..c8e503b5 100644 --- a/.github/workflows/main_test_package.yml +++ b/.github/workflows/main_test_package.yml @@ -35,7 +35,7 @@ jobs: strategy: matrix: warehouse: ["snowflake", "bigquery", "postgres"] - version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0"] + version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0"] runs-on: ubuntu-latest permissions: contents: "read" diff --git a/.github/workflows/publish_docs_on_release.yml b/.github/workflows/publish_docs_on_release.yml index d558a464..b02ee057 100644 --- a/.github/workflows/publish_docs_on_release.yml +++ b/.github/workflows/publish_docs_on_release.yml @@ -39,7 +39,7 @@ jobs: uses: actions/checkout@v3 - name: Install Python packages - run: python -m pip install dbt-snowflake~=1.6.0 + run: python -m pip install dbt-snowflake~=1.7.0 - name: Test database connection run: dbt debug diff --git a/dbt_project.yml b/dbt_project.yml index ee9e85ca..7ddce641 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -1,7 +1,7 @@ name: "dbt_artifacts" version: "2.6.1" config-version: 2 
-require-dbt-version: [">=1.3.0", "<1.7.0"] +require-dbt-version: [">=1.3.0", "<1.8.0"] profile: "dbt_artifacts" clean-targets: # folders to be removed by `dbt clean` diff --git a/tox.ini b/tox.ini index 8add76ba..542d6e21 100644 --- a/tox.ini +++ b/tox.ini @@ -36,7 +36,7 @@ rules = LT01,LT02,LT03,CP01,AL01,AL02,CP02,ST08,LT06,LT07,AM01,LT08,AL05,RF02,RF deps = sqlfluff-templater-dbt~=2.0.2 - dbt-snowflake~=1.6.0 + dbt-snowflake~=1.7.0 [sqlfluff:indentation] indent_unit = space @@ -114,13 +114,13 @@ commands = sqlfluff fix models --ignore parsing # Generate docs [testenv:generate_docs] -deps = dbt-snowflake~=1.6.0 +deps = dbt-snowflake~=1.7.0 commands = dbt docs generate --profiles-dir integration_test_project # Snowflake integration tests [testenv:integration_snowflake] changedir = integration_test_project -deps = dbt-snowflake~=1.6.0 +deps = dbt-snowflake~=1.7.0 commands = dbt clean dbt deps @@ -159,10 +159,18 @@ commands = dbt deps dbt build --target snowflake +[testenv:integration_snowflake_1_7_0] +changedir = integration_test_project +deps = dbt-snowflake~=1.7.0 +commands = + dbt clean + dbt deps + dbt build --target snowflake + # Databricks integration tests [testenv:integration_databricks] changedir = integration_test_project -deps = dbt-databricks~=1.6.0 +deps = dbt-databricks~=1.7.0 commands = dbt clean dbt deps @@ -200,10 +208,18 @@ commands = dbt deps dbt build --target databricks +[testenv:integration_databricks_1_7_0] +changedir = integration_test_project +deps = dbt-databricks~=1.7.0 +commands = + dbt clean + dbt deps + dbt build --target databricks + # Bigquery integration tests [testenv:integration_bigquery] changedir = integration_test_project -deps = dbt-bigquery~=1.6.0 +deps = dbt-bigquery~=1.7.0 commands = dbt clean dbt deps @@ -241,6 +257,14 @@ commands = dbt deps dbt build --target bigquery --vars '"my_var": "my value"' +[testenv:integration_bigquery_1_7_0] +changedir = integration_test_project +deps = dbt-bigquery~=1.7.0 +commands = + dbt 
clean + dbt deps + dbt build --target bigquery --vars '"my_var": "my value"' + # Spark integration test (disabled) [testenv:integration_spark] changedir = integration_test_project @@ -252,7 +276,7 @@ commands = [testenv:integration_postgres] changedir = integration_test_project -deps = dbt-postgres~=1.6.0 +deps = dbt-postgres~=1.7.0 commands = dbt clean dbt deps @@ -290,3 +314,12 @@ commands = dbt deps dbt build --target postgres +[testenv:integration_postgres_1_7_0] +changedir = integration_test_project +deps = dbt-postgres~=1.7.0 +commands = + dbt clean + dbt deps + dbt build --target postgres + + From 4220a9cea519c98a7d23ec8396d661c6fc8880da Mon Sep 17 00:00:00 2001 From: Gemma Down <52132406+glsdown@users.noreply.github.com> Date: Thu, 16 Nov 2023 12:40:12 +0000 Subject: [PATCH 05/21] Release v2.6.2 (#404) * Add support for 1.7 * Bump version number --- README.md | 2 +- dbt_project.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 6c537660..6350d743 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ See the generated [dbt docs site](https://brooklyn-data.github.io/dbt_artifacts/ ``` packages: - package: brooklyn-data/dbt_artifacts - version: 2.6.1 + version: 2.6.2 ``` :construction_worker: Make sure to fix at least the **minor** version, to avoid issues when a new release is open. See the notes on upgrading below for more detail. 
diff --git a/dbt_project.yml b/dbt_project.yml index 7ddce641..60bd2117 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -1,5 +1,5 @@ name: "dbt_artifacts" -version: "2.6.1" +version: "2.6.2" config-version: 2 require-dbt-version: [">=1.3.0", "<1.8.0"] profile: "dbt_artifacts" From dba29174d7a61d3e71beb8b9a9b6e96621f8dab6 Mon Sep 17 00:00:00 2001 From: Matt Lam Date: Thu, 30 Nov 2023 18:02:54 -0800 Subject: [PATCH 06/21] Fix: typo in upload_results to determine batch size --- macros/upload_results/upload_results.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/macros/upload_results/upload_results.sql b/macros/upload_results/upload_results.sql index fcadc199..4876b82c 100644 --- a/macros/upload_results/upload_results.sql +++ b/macros/upload_results/upload_results.sql @@ -19,7 +19,7 @@ {% set objects = dbt_artifacts.get_dataset_content(dataset) %} {# Upload in chunks to reduce query size #} - {% if dataset == 'model' %} + {% if dataset == 'models' %} {% set upload_limit = 50 if target.type == 'bigquery' else 100 %} {% else %} {% set upload_limit = 300 if target.type == 'bigquery' else 5000 %} From 2ee23c20941fbf59dac0708d459be24c08eb1c71 Mon Sep 17 00:00:00 2001 From: Mark Greenwood Date: Thu, 21 Dec 2023 16:23:26 +0000 Subject: [PATCH 07/21] Copy model object to leave original intact --- .../upload_models.sql | 81 ++++++++++--------- 1 file changed, 42 insertions(+), 39 deletions(-) diff --git a/macros/upload_individual_datasets/upload_models.sql b/macros/upload_individual_datasets/upload_models.sql index d8dc49fd..b5f9a8ec 100644 --- a/macros/upload_individual_datasets/upload_models.sql +++ b/macros/upload_individual_datasets/upload_models.sql @@ -24,22 +24,23 @@ {{ adapter.dispatch('parse_json', 'dbt_artifacts')(adapter.dispatch('column_identifier', 'dbt_artifacts')(15)) }} from values {% for model in models -%} - {% do model.pop('raw_code', None) %} + {% set model_copy = model.copy() -%} + {% do model_copy.pop('raw_code', None) %} ( '{{ 
invocation_id }}', {# command_invocation_id #} - '{{ model.unique_id }}', {# node_id #} + '{{ model_copy.unique_id }}', {# node_id #} '{{ run_started_at }}', {# run_started_at #} - '{{ model.database }}', {# database #} - '{{ model.schema }}', {# schema #} - '{{ model.name }}', {# name #} - '{{ tojson(model.depends_on.nodes) | replace('\\', '\\\\') }}', {# depends_on_nodes #} - '{{ model.package_name }}', {# package_name #} - '{{ model.original_file_path | replace('\\', '\\\\') }}', {# path #} - '{{ model.checksum.checksum | replace('\\', '\\\\') }}', {# checksum #} - '{{ model.config.materialized }}', {# materialization #} - '{{ tojson(model.tags) }}', {# tags #} - '{{ tojson(model.config.meta) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"') }}', {# meta #} - '{{ model.alias }}', {# alias #} + '{{ model_copy.database }}', {# database #} + '{{ model_copy.schema }}', {# schema #} + '{{ model_copy.name }}', {# name #} + '{{ tojson(model_copy.depends_on.nodes) | replace('\\', '\\\\') }}', {# depends_on_nodes #} + '{{ model_copy.package_name }}', {# package_name #} + '{{ model_copy.original_file_path | replace('\\', '\\\\') }}', {# path #} + '{{ model_copy.checksum.checksum | replace('\\', '\\\\') }}', {# checksum #} + '{{ model_copy.config.materialized }}', {# materialization #} + '{{ tojson(model_copy.tags) }}', {# tags #} + '{{ tojson(model_copy.config.meta) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"') }}', {# meta #} + '{{ model_copy.alias }}', {# alias #} {% if var('dbt_artifacts_exclude_all_results', false) %} null {% else %} @@ -59,22 +60,23 @@ {% if models != [] %} {% set model_values %} {% for model in models -%} - {% do model.pop('raw_code', None) %} + {% set model_copy = model.copy() -%} + {% do model_copy.pop('raw_code', None) %} ( '{{ invocation_id }}', {# command_invocation_id #} - '{{ model.unique_id }}', {# node_id #} + '{{ model_copy.unique_id }}', {# node_id #} '{{ run_started_at }}', {# run_started_at #} - '{{ 
model.database }}', {# database #} - '{{ model.schema }}', {# schema #} - '{{ model.name }}', {# name #} - {{ tojson(model.depends_on.nodes) }}, {# depends_on_nodes #} - '{{ model.package_name }}', {# package_name #} - '{{ model.original_file_path | replace('\\', '\\\\') }}', {# path #} - '{{ model.checksum.checksum | replace('\\', '\\\\') }}', {# checksum #} - '{{ model.config.materialized }}', {# materialization #} - {{ tojson(model.tags) }}, {# tags #} - {{ adapter.dispatch('parse_json', 'dbt_artifacts')(tojson(model.config.meta)) }}, {# meta #} - '{{ model.alias }}', {# alias #} + '{{ model_copy.database }}', {# database #} + '{{ model_copy.schema }}', {# schema #} + '{{ model_copy.name }}', {# name #} + {{ tojson(model_copy.depends_on.nodes) }}, {# depends_on_nodes #} + '{{ model_copy.package_name }}', {# package_name #} + '{{ model_copy.original_file_path | replace('\\', '\\\\') }}', {# path #} + '{{ model_copy.checksum.checksum | replace('\\', '\\\\') }}', {# checksum #} + '{{ model_copy.config.materialized }}', {# materialization #} + {{ tojson(model_copy.tags) }}, {# tags #} + {{ adapter.dispatch('parse_json', 'dbt_artifacts')(tojson(model_copy.config.meta)) }}, {# meta #} + '{{ model_copy.alias }}', {# alias #} {% if var('dbt_artifacts_exclude_all_results', false) %} null {% else %} @@ -94,22 +96,23 @@ {% if models != [] %} {% set model_values %} {% for model in models -%} - {% do model.pop('raw_code', None) %} + {% set model_copy = model.copy() -%} + {% do model_copy.pop('raw_code', None) %} ( '{{ invocation_id }}', {# command_invocation_id #} - '{{ model.unique_id }}', {# node_id #} + '{{ model_copy.unique_id }}', {# node_id #} '{{ run_started_at }}', {# run_started_at #} - '{{ model.database }}', {# database #} - '{{ model.schema }}', {# schema #} - '{{ model.name }}', {# name #} - '{{ tojson(model.depends_on.nodes) }}', {# depends_on_nodes #} - '{{ model.package_name }}', {# package_name #} - $${{ model.original_file_path | replace('\\', '\\\\') }}$$, 
{# path #} - '{{ model.checksum.checksum }}', {# checksum #} - '{{ model.config.materialized }}', {# materialization #} - '{{ tojson(model.tags) }}', {# tags #} - $${{ model.config.meta }}$$, {# meta #} - '{{ model.alias }}', {# alias #} + '{{ model_copy.database }}', {# database #} + '{{ model_copy.schema }}', {# schema #} + '{{ model_copy.name }}', {# name #} + '{{ tojson(model_copy.depends_on.nodes) }}', {# depends_on_nodes #} + '{{ model_copy.package_name }}', {# package_name #} + $${{ model_copy.original_file_path | replace('\\', '\\\\') }}$$, {# path #} + '{{ model_copy.checksum.checksum }}', {# checksum #} + '{{ model_copy.config.materialized }}', {# materialization #} + '{{ tojson(model_copy.tags) }}', {# tags #} + $${{ model_copy.config.meta }}$$, {# meta #} + '{{ model_copy.alias }}', {# alias #} {% if var('dbt_artifacts_exclude_all_results', false) %} null {% else %} From d71c39fa3e7a9082e19b0a920ecc01be9745fca2 Mon Sep 17 00:00:00 2001 From: Jared Rimmer <100997264+jared-rimmer@users.noreply.github.com> Date: Thu, 25 Jan 2024 11:28:38 +0000 Subject: [PATCH 08/21] Update macros/upload_results/upload_results.sql --- macros/upload_results/upload_results.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/macros/upload_results/upload_results.sql b/macros/upload_results/upload_results.sql index 4876b82c..114a667d 100644 --- a/macros/upload_results/upload_results.sql +++ b/macros/upload_results/upload_results.sql @@ -18,7 +18,7 @@ {# Get the results that need to be uploaded #} {% set objects = dbt_artifacts.get_dataset_content(dataset) %} - {# Upload in chunks to reduce query size #} + {# Upload in chunks to reduce the query size #} {% if dataset == 'models' %} {% set upload_limit = 50 if target.type == 'bigquery' else 100 %} {% else %} From b51347b9b7fdade59740f93c5f55c51135d14820 Mon Sep 17 00:00:00 2001 From: Mark Greenwood Date: Thu, 25 Jan 2024 12:08:10 +0000 Subject: [PATCH 09/21] replace model for include all results --- 
macros/upload_individual_datasets/upload_models.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/macros/upload_individual_datasets/upload_models.sql b/macros/upload_individual_datasets/upload_models.sql index b5f9a8ec..57b1caff 100644 --- a/macros/upload_individual_datasets/upload_models.sql +++ b/macros/upload_individual_datasets/upload_models.sql @@ -44,7 +44,7 @@ {% if var('dbt_artifacts_exclude_all_results', false) %} null {% else %} - '{{ tojson(model) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"') }}' {# all_results #} + '{{ tojson(model_copy) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"') }}' {# all_results #} {% endif %} ) {%- if not loop.last %},{%- endif %} From dff781bf18fdad10693e70cf50812ddfaba64c7a Mon Sep 17 00:00:00 2001 From: Mark Greenwood Date: Thu, 25 Jan 2024 12:10:26 +0000 Subject: [PATCH 10/21] replace model for include all results --- macros/upload_individual_datasets/upload_models.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/macros/upload_individual_datasets/upload_models.sql b/macros/upload_individual_datasets/upload_models.sql index 57b1caff..7570b06a 100644 --- a/macros/upload_individual_datasets/upload_models.sql +++ b/macros/upload_individual_datasets/upload_models.sql @@ -80,7 +80,7 @@ {% if var('dbt_artifacts_exclude_all_results', false) %} null {% else %} - {{ adapter.dispatch('parse_json', 'dbt_artifacts')(tojson(model) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"')) }} {# all_results #} + {{ adapter.dispatch('parse_json', 'dbt_artifacts')(tojson(model_copy) | replace("\\", "\\\\") | replace("'","\\'") | replace('"', '\\"')) }} {# all_results #} {% endif %} ) {%- if not loop.last %},{%- endif %} @@ -116,7 +116,7 @@ {% if var('dbt_artifacts_exclude_all_results', false) %} null {% else %} - $${{ tojson(model) }}$$ {# all_results #} + $${{ tojson(model_copy) }}$$ {# all_results #} {% endif %} ) {%- if not loop.last %},{%- 
endif %} From 5715e734b46ad36dcf21a944b76b734c5d103b00 Mon Sep 17 00:00:00 2001 From: sp-tkerlavage <90415765+sp-tkerlavage@users.noreply.github.com> Date: Wed, 28 Feb 2024 21:24:57 -0500 Subject: [PATCH 11/21] * Implementation of get_test_executions_dml_sql has code that references `model.timing`. Local loop variable is `test`, not `model`. Revised code to use `test` instead of `model`. * Implemented snowflake_get_test_executions_dml_sql to support rows_affected. --- .../upload_test_executions.sql | 61 +++++++++++++++++-- 1 file changed, 57 insertions(+), 4 deletions(-) diff --git a/macros/upload_individual_datasets/upload_test_executions.sql b/macros/upload_individual_datasets/upload_test_executions.sql index ea3553ae..74223ce5 100644 --- a/macros/upload_individual_datasets/upload_test_executions.sql +++ b/macros/upload_individual_datasets/upload_test_executions.sql @@ -35,9 +35,9 @@ '{{ test.thread_id }}', {# thread_id #} '{{ test.status }}', {# status #} - {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% set compile_started_at = (test.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} - {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% set query_completed_at = (test.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ test.execution_time }}, {# total_node_runtime #} @@ -73,9 +73,9 @@ '{{ test.thread_id }}', {# thread_id #} '{{ test.status }}', {# status #} - {% set compile_started_at = (model.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% set compile_started_at = (test.timing | 
selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} - {% set query_completed_at = (model.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% set query_completed_at = (test.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ test.execution_time }}, {# total_node_runtime #} @@ -148,3 +148,56 @@ {{ return("") }} {% endif %} {% endmacro -%} + +{% macro snowflake_get_test_executions_dml_sql(tests) -%} + {% if tests != [] %} + {% set test_execution_values %} + select + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(1) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(2) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(3) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(4) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(5) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(6) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(7) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(8) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(9) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(10) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(11) }}, + {{ adapter.dispatch('column_identifier', 'dbt_artifacts')(12) }}, + {{ adapter.dispatch('parse_json', 'dbt_artifacts')(adapter.dispatch('column_identifier', 'dbt_artifacts')(13)) }} + from values + {% for test in tests -%} + ( + '{{ invocation_id }}', {# command_invocation_id #} + '{{ test.node.unique_id }}', {# node_id #} + '{{ run_started_at }}', {# run_started_at #} + + {% set config_full_refresh = test.node.config.full_refresh %} + {% if config_full_refresh is none %} + 
{% set config_full_refresh = flags.FULL_REFRESH %} + {% endif %} + '{{ config_full_refresh }}', {# was_full_refresh #} + + '{{ test.thread_id }}', {# thread_id #} + '{{ test.status }}', {# status #} + + {% set compile_started_at = (test.timing | selectattr("name", "eq", "compile") | first | default({}))["started_at"] %} + {% if compile_started_at %}'{{ compile_started_at }}'{% else %}null{% endif %}, {# compile_started_at #} + {% set query_completed_at = (test.timing | selectattr("name", "eq", "execute") | first | default({}))["completed_at"] %} + {% if query_completed_at %}'{{ query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} + + {{ test.execution_time }}, {# total_node_runtime #} + try_cast('{{ model.adapter_response.rows_affected }}' as int), {# rows_affected #} + {{ 'null' if test.failures is none else test.failures }}, {# failures #} + '{{ test.message | replace("\\", "\\\\") | replace("'", "\\'") | replace('"', '\\"') }}', {# message #} + '{{ tojson(test.adapter_response) | replace("\\", "\\\\") | replace("'", "\\'") | replace('"', '\\"') }}' {# adapter_response #} + ) + {%- if not loop.last %},{%- endif %} + {%- endfor %} + {% endset %} + {{ test_execution_values }} + {% else %} + {{ return("") }} + {% endif %} +{% endmacro -%} \ No newline at end of file From d40947cffabbab3048317070c7c1a6431e650722 Mon Sep 17 00:00:00 2001 From: Stout Date: Wed, 15 May 2024 23:43:56 -0400 Subject: [PATCH 12/21] upgrade to 1.8 --- .github/workflows/ci_lint_package.yml | 2 +- .github/workflows/main_lint_package.yml | 2 +- .github/workflows/publish_docs_on_release.yml | 2 +- tox.ini | 20 +++++++++---------- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/workflows/ci_lint_package.yml b/.github/workflows/ci_lint_package.yml index 2563e0cf..70a89edd 100644 --- a/.github/workflows/ci_lint_package.yml +++ b/.github/workflows/ci_lint_package.yml @@ -50,7 +50,7 @@ jobs: architecture: "x64" - name: Install Python packages - run: python 
-m pip install dbt-snowflake~=1.7.0 sqlfluff-templater-dbt~=2.3.2 + run: python -m pip install dbt-snowflake~=1.8.0 sqlfluff-templater-dbt~=2.3.2 - name: Test database connection run: dbt debug diff --git a/.github/workflows/main_lint_package.yml b/.github/workflows/main_lint_package.yml index 771d185f..fe21bc1b 100644 --- a/.github/workflows/main_lint_package.yml +++ b/.github/workflows/main_lint_package.yml @@ -46,7 +46,7 @@ jobs: architecture: "x64" - name: Install Python packages - run: python -m pip install dbt-snowflake~=1.7.0 sqlfluff-templater-dbt~=2.3.2 + run: python -m pip install dbt-snowflake~=1.8.0 sqlfluff-templater-dbt~=2.3.2 - name: Test database connection run: dbt debug diff --git a/.github/workflows/publish_docs_on_release.yml b/.github/workflows/publish_docs_on_release.yml index b02ee057..663486ff 100644 --- a/.github/workflows/publish_docs_on_release.yml +++ b/.github/workflows/publish_docs_on_release.yml @@ -39,7 +39,7 @@ jobs: uses: actions/checkout@v3 - name: Install Python packages - run: python -m pip install dbt-snowflake~=1.7.0 + run: python -m pip install dbt-snowflake~=1.8.0 - name: Test database connection run: dbt debug diff --git a/tox.ini b/tox.ini index 542d6e21..729ab521 100644 --- a/tox.ini +++ b/tox.ini @@ -36,7 +36,7 @@ rules = LT01,LT02,LT03,CP01,AL01,AL02,CP02,ST08,LT06,LT07,AM01,LT08,AL05,RF02,RF deps = sqlfluff-templater-dbt~=2.0.2 - dbt-snowflake~=1.7.0 + dbt-snowflake~=1.8.0 [sqlfluff:indentation] indent_unit = space @@ -114,13 +114,13 @@ commands = sqlfluff fix models --ignore parsing # Generate docs [testenv:generate_docs] -deps = dbt-snowflake~=1.7.0 +deps = dbt-snowflake~=1.8.0 commands = dbt docs generate --profiles-dir integration_test_project # Snowflake integration tests [testenv:integration_snowflake] changedir = integration_test_project -deps = dbt-snowflake~=1.7.0 +deps = dbt-snowflake~=1.8.0 commands = dbt clean dbt deps @@ -161,7 +161,7 @@ commands = [testenv:integration_snowflake_1_7_0] changedir = 
integration_test_project -deps = dbt-snowflake~=1.7.0 +deps = dbt-snowflake~=1.8.0 commands = dbt clean dbt deps @@ -170,7 +170,7 @@ commands = # Databricks integration tests [testenv:integration_databricks] changedir = integration_test_project -deps = dbt-databricks~=1.7.0 +deps = dbt-databricks~=1.8.0 commands = dbt clean dbt deps @@ -210,7 +210,7 @@ commands = [testenv:integration_databricks_1_7_0] changedir = integration_test_project -deps = dbt-databricks~=1.7.0 +deps = dbt-databricks~=1.8.0 commands = dbt clean dbt deps @@ -219,7 +219,7 @@ commands = # Bigquery integration tests [testenv:integration_bigquery] changedir = integration_test_project -deps = dbt-bigquery~=1.7.0 +deps = dbt-bigquery~=1.8.0 commands = dbt clean dbt deps @@ -259,7 +259,7 @@ commands = [testenv:integration_bigquery_1_7_0] changedir = integration_test_project -deps = dbt-bigquery~=1.7.0 +deps = dbt-bigquery~=1.8.0 commands = dbt clean dbt deps @@ -276,7 +276,7 @@ commands = [testenv:integration_postgres] changedir = integration_test_project -deps = dbt-postgres~=1.7.0 +deps = dbt-postgres~=1.8.0 commands = dbt clean dbt deps @@ -316,7 +316,7 @@ commands = [testenv:integration_postgres_1_7_0] changedir = integration_test_project -deps = dbt-postgres~=1.7.0 +deps = dbt-postgres~=1.8.0 commands = dbt clean dbt deps From 4c5372d466b011fca4593d688b26b3771da8242a Mon Sep 17 00:00:00 2001 From: Stout Date: Wed, 15 May 2024 23:52:25 -0400 Subject: [PATCH 13/21] upgrade --- .github/workflows/ci_test_package.yml | 4 +-- .github/workflows/main_test_package.yml | 2 +- tox.ini | 34 ++++++++++++++++++++++++- 3 files changed, 36 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci_test_package.yml b/.github/workflows/ci_test_package.yml index 034a316f..06c07e92 100644 --- a/.github/workflows/ci_test_package.yml +++ b/.github/workflows/ci_test_package.yml @@ -21,7 +21,7 @@ env: DBT_ENV_SECRET_DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }} DBT_ENV_SECRET_GCP_PROJECT: ${{ 
secrets.GCP_PROJECT }} # Env var to test version - LAST_RELEASE_SUPPORTED_DBT_VERSION: 1_7_0 # A dbt version supported by both the last release and this one + LAST_RELEASE_SUPPORTED_DBT_VERSION: 1_8_0 # A dbt version supported by both the last release and this one # Env vars to test invocations model DBT_CLOUD_PROJECT_ID: 123 DBT_CLOUD_JOB_ID: ABC @@ -115,7 +115,7 @@ jobs: matrix: warehouse: ["snowflake", "bigquery", "postgres"] # When supporting a new version, update the list here - version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0"] + version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0", "1_8_0"] runs-on: ubuntu-latest environment: name: Approve Integration Tests diff --git a/.github/workflows/main_test_package.yml b/.github/workflows/main_test_package.yml index c8e503b5..12a9c957 100644 --- a/.github/workflows/main_test_package.yml +++ b/.github/workflows/main_test_package.yml @@ -35,7 +35,7 @@ jobs: strategy: matrix: warehouse: ["snowflake", "bigquery", "postgres"] - version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0"] + version: ["1_3_0", "1_4_0", "1_5_0", "1_6_0", "1_7_0", "1_8_0"] runs-on: ubuntu-latest permissions: contents: "read" diff --git a/tox.ini b/tox.ini index 729ab521..10880752 100644 --- a/tox.ini +++ b/tox.ini @@ -161,6 +161,14 @@ commands = [testenv:integration_snowflake_1_7_0] changedir = integration_test_project +deps = dbt-snowflake~=1.7.0 +commands = + dbt clean + dbt deps + dbt build --target snowflake + +[testenv:integration_snowflake_1_8_0] +changedir = integration_test_project deps = dbt-snowflake~=1.8.0 commands = dbt clean @@ -210,6 +218,14 @@ commands = [testenv:integration_databricks_1_7_0] changedir = integration_test_project +deps = dbt-databricks~=1.7.0 +commands = + dbt clean + dbt deps + dbt build --target databricks + +[testenv:integration_databricks_1_8_0] +changedir = integration_test_project deps = dbt-databricks~=1.8.0 commands = dbt clean @@ -259,6 +275,14 @@ commands = [testenv:integration_bigquery_1_7_0] changedir 
= integration_test_project +deps = dbt-bigquery~=1.7.0 +commands = + dbt clean + dbt deps + dbt build --target bigquery --vars '"my_var": "my value"' + +[testenv:integration_bigquery_1_8_0] +changedir = integration_test_project deps = dbt-bigquery~=1.8.0 commands = dbt clean dbt deps @@ -316,7 +340,15 @@ commands = [testenv:integration_postgres_1_7_0] changedir = integration_test_project -deps = dbt-postgres~=1.8.0 +deps = dbt-postgres~=1.7.0 +commands = + dbt clean + dbt deps + dbt build --target postgres + +[testenv:integration_postgres_1_8_0] +changedir = integration_test_project +deps = dbt-postgres~=1.8.0 +commands = dbt clean dbt deps From de794836fd06fc6f994f870d2b3b99a3152fac3e Mon Sep 17 00:00:00 2001 From: Stout Date: Wed, 15 May 2024 23:53:27 -0400 Subject: [PATCH 14/21] upgrade --- dbt_project.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbt_project.yml b/dbt_project.yml index 60bd2117..8bfc6192 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -1,7 +1,7 @@ name: "dbt_artifacts" version: "2.6.2" config-version: 2 -require-dbt-version: [">=1.3.0", "<1.8.0"] +require-dbt-version: [">=1.3.0", "<1.9.0"] profile: "dbt_artifacts" clean-targets: # folders to be removed by `dbt clean` From d395fe1e340018b6ea4d7c0e6f045fdfe33fefc1 Mon Sep 17 00:00:00 2001 From: James Colvin <7504815+jecolvin@users.noreply.github.com> Date: Thu, 16 May 2024 10:41:14 -0400 Subject: [PATCH 15/21] Update ci_lint_package.yml --- .github/workflows/ci_lint_package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci_lint_package.yml b/.github/workflows/ci_lint_package.yml index 70a89edd..e2d33b7d 100644 --- a/.github/workflows/ci_lint_package.yml +++ b/.github/workflows/ci_lint_package.yml @@ -50,7 +50,7 @@ jobs: architecture: "x64" - name: Install Python packages - run: python -m pip install dbt-snowflake~=1.8.0 sqlfluff-templater-dbt~=2.3.2 + run: python -m pip install dbt-snowflake~=1.8.0 sqlfluff-templater-dbt~=3.0.0 - name:
Test database connection run: dbt debug From 39e2a235fba65c5283d9ea78467fc85fac708414 Mon Sep 17 00:00:00 2001 From: James Colvin <7504815+jecolvin@users.noreply.github.com> Date: Thu, 16 May 2024 10:42:13 -0400 Subject: [PATCH 16/21] Update main_lint_package.yml --- .github/workflows/main_lint_package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main_lint_package.yml b/.github/workflows/main_lint_package.yml index fe21bc1b..bd691804 100644 --- a/.github/workflows/main_lint_package.yml +++ b/.github/workflows/main_lint_package.yml @@ -46,7 +46,7 @@ jobs: architecture: "x64" - name: Install Python packages - run: python -m pip install dbt-snowflake~=1.8.0 sqlfluff-templater-dbt~=2.3.2 + run: python -m pip install dbt-snowflake~=1.8.0 sqlfluff-templater-dbt~=3.0.0 - name: Test database connection run: dbt debug From 7aeb454b6434f6903ec2ecab3350d9cb419dff66 Mon Sep 17 00:00:00 2001 From: James Colvin <7504815+jecolvin@users.noreply.github.com> Date: Fri, 17 May 2024 09:42:44 -0400 Subject: [PATCH 17/21] Update dbt_project.yml --- dbt_project.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbt_project.yml b/dbt_project.yml index 8bfc6192..6ea15bba 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -1,5 +1,5 @@ name: "dbt_artifacts" -version: "2.6.2" +version: "2.6.3" config-version: 2 require-dbt-version: [">=1.3.0", "<1.9.0"] profile: "dbt_artifacts" From 598583197a56927e180ef94d3c0ce18878e3d7f5 Mon Sep 17 00:00:00 2001 From: James Colvin <7504815+jecolvin@users.noreply.github.com> Date: Fri, 17 May 2024 09:43:37 -0400 Subject: [PATCH 18/21] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6350d743..166d708a 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ See the generated [dbt docs site](https://brooklyn-data.github.io/dbt_artifacts/ ``` packages: - package: brooklyn-data/dbt_artifacts - version: 2.6.2 + version: 
2.6.3 ``` :construction_worker: Make sure to fix at least the **minor** version, to avoid issues when a new release is open. See the notes on upgrading below for more detail. From 17060c0b776c23b35be3996f514f33ed6439e328 Mon Sep 17 00:00:00 2001 From: michelley-an <101600153+michelley-an@users.noreply.github.com> Date: Mon, 12 Aug 2024 06:42:36 -0700 Subject: [PATCH 19/21] Update macros/upload_individual_datasets/upload_test_executions.sql Co-authored-by: Laura Li Foa Wing <100870296+llifoawing@users.noreply.github.com> --- macros/upload_individual_datasets/upload_test_executions.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/macros/upload_individual_datasets/upload_test_executions.sql b/macros/upload_individual_datasets/upload_test_executions.sql index 74223ce5..5401d4e7 100644 --- a/macros/upload_individual_datasets/upload_test_executions.sql +++ b/macros/upload_individual_datasets/upload_test_executions.sql @@ -149,7 +149,7 @@ {% endif %} {% endmacro -%} -{% macro snowflake_get_test_executions_dml_sql(tests) -%} +{% macro snowflake__get_test_executions_dml_sql(tests) -%} {% if tests != [] %} {% set test_execution_values %} select From a0b041759db7642e70f400f172d2a69aea2db3f3 Mon Sep 17 00:00:00 2001 From: michelley-an <101600153+michelley-an@users.noreply.github.com> Date: Mon, 12 Aug 2024 06:42:45 -0700 Subject: [PATCH 20/21] Update macros/upload_individual_datasets/upload_test_executions.sql Co-authored-by: Laura Li Foa Wing <100870296+llifoawing@users.noreply.github.com> --- macros/upload_individual_datasets/upload_test_executions.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/macros/upload_individual_datasets/upload_test_executions.sql b/macros/upload_individual_datasets/upload_test_executions.sql index 5401d4e7..cb13288b 100644 --- a/macros/upload_individual_datasets/upload_test_executions.sql +++ b/macros/upload_individual_datasets/upload_test_executions.sql @@ -188,7 +188,7 @@ {% if query_completed_at %}'{{ 
query_completed_at }}'{% else %}null{% endif %}, {# query_completed_at #} {{ test.execution_time }}, {# total_node_runtime #} - try_cast('{{ model.adapter_response.rows_affected }}' as int), {# rows_affected #} + try_cast('{{ test.adapter_response.rows_affected }}' as int), {# rows_affected #} {{ 'null' if test.failures is none else test.failures }}, {# failures #} '{{ test.message | replace("\\", "\\\\") | replace("'", "\\'") | replace('"', '\\"') }}', {# message #} '{{ tojson(test.adapter_response) | replace("\\", "\\\\") | replace("'", "\\'") | replace('"', '\\"') }}' {# adapter_response #} From f1c1c03f79c835124420494e7247fc83f5b97150 Mon Sep 17 00:00:00 2001 From: Michelle Yan Date: Wed, 14 Aug 2024 11:41:28 -0400 Subject: [PATCH 21/21] update version --- README.md | 2 +- dbt_project.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 166d708a..93c25f40 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ See the generated [dbt docs site](https://brooklyn-data.github.io/dbt_artifacts/ ``` packages: - package: brooklyn-data/dbt_artifacts - version: 2.6.3 + version: 2.6.4 ``` :construction_worker: Make sure to fix at least the **minor** version, to avoid issues when a new release is open. See the notes on upgrading below for more detail. diff --git a/dbt_project.yml b/dbt_project.yml index 6ea15bba..684efce6 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -1,5 +1,5 @@ name: "dbt_artifacts" -version: "2.6.3" +version: "2.6.4" config-version: 2 require-dbt-version: [">=1.3.0", "<1.9.0"] profile: "dbt_artifacts"