From 53667f9cb1509fa74ce29badb6678657f743eda5 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Wed, 22 Nov 2023 16:15:34 -0600
Subject: [PATCH 1/8] bug/databricks-get-url-parameter

---
 macros/extract_uri_parameter.sql | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)
 create mode 100644 macros/extract_uri_parameter.sql

diff --git a/macros/extract_uri_parameter.sql b/macros/extract_uri_parameter.sql
new file mode 100644
index 0000000..8e17c8b
--- /dev/null
+++ b/macros/extract_uri_parameter.sql
@@ -0,0 +1,20 @@
+{% macro extract_uri_parameter(field, uri_parameter) -%}
+
+{{ adapter.dispatch('extract_uri_parameter', 'fivetran_utils') (field, uri_parameter) }}
+
+{% endmacro %}
+
+
+{% macro default__extract_uri_parameter(field, uri_parameter) -%}
+
+{{ dbt_utils.get_url_parameter(field, uri_parameter) }}
+
+{%- endmacro %}
+
+
+{% macro databricks__extract_uri_parameter(field, uri_parameter) -%}
+
+{%- set formatted_uri_parameter = "'" + uri_parameter + "=([^&]+)'" -%}
+nullif(regexp_extract({{ field }}, {{ formatted_uri_parameter }}, 1), '')
+
+{%- endmacro %}
\ No newline at end of file

From 879196208350bda402cfa6cb0fcf7a8ff07671b5 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Wed, 22 Nov 2023 16:23:54 -0600
Subject: [PATCH 2/8] update pkgs and reqs

---
 integration_tests/packages.yml     | 16 ++++------------
 integration_tests/requirements.txt |  4 +---
 2 files changed, 5 insertions(+), 15 deletions(-)

diff --git a/integration_tests/packages.yml b/integration_tests/packages.yml
index f15766b..976ad3a 100644
--- a/integration_tests/packages.yml
+++ b/integration_tests/packages.yml
@@ -2,12 +2,8 @@ packages:
   - package: fivetran/ad_reporting
     version: [">=1.0.0", "<2.0.0"]
 
-  # - package: fivetran/shopify_holistic_reporting
-  #   version: [">=0.1.0", "<1.0.0"]
-  # put this back when package versions updated in shopify_holistic_reporting
-  - git: https://github.com/fivetran/dbt_shopify_holistic_reporting.git
-    revision: test/update-shopify-versions
-    warn-unpinned: false
+  - package: fivetran/shopify_holistic_reporting
+    version: [">=0.1.0", "<1.0.0"]
 
   - package: fivetran/social_media_reporting
     version: [">=0.1.0", "<1.0.0"]
@@ -76,9 +72,5 @@ packages:
     version: [">=0.1.0", "<1.0.0"]
   - package: fivetran/zendesk
     version: [">=0.1.0", "<1.0.0"]
-  # - package: fivetran/intercom
-  #   version: [">=0.1.0", "<1.0.0"]
-  # put this back when package versions updated in intercom_source
-  - git: https://github.com/fivetran/dbt_intercom.git
-    revision: test/calogica_version_update
-    warn-unpinned: false
\ No newline at end of file
+  - package: fivetran/intercom
+    version: [">=0.1.0", "<1.0.0"]
\ No newline at end of file

diff --git a/integration_tests/requirements.txt b/integration_tests/requirements.txt
index b7702d6..810bae1 100644
--- a/integration_tests/requirements.txt
+++ b/integration_tests/requirements.txt
@@ -4,6 +4,4 @@ dbt-redshift>=1.3.0,<2.0.0
 dbt-postgres>=1.3.0,<2.0.0
 dbt-spark>=1.3.0,<2.0.0
 dbt-spark[PyHive]>=1.3.0,<2.0.0
-dbt-databricks>=1.3.0,<2.0.0
-
-oscrypto @ git+https://github.com/wbond/oscrypto.git@d5f3437
\ No newline at end of file
+dbt-databricks>=1.6.0,<2.0.0
\ No newline at end of file

From c9618b8be985a501df8ac9c12cfd7436c89b86a0 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Wed, 22 Nov 2023 16:25:18 -0600
Subject: [PATCH 3/8] update yml

---
 .buildkite/pipeline.yml | 32 ++++++++++++++++----------------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
index 4a32060..4ab4d35 100644
--- a/.buildkite/pipeline.yml
+++ b/.buildkite/pipeline.yml
@@ -59,7 +59,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift linkedin apple_search_ads snapchat_ads facebook_ads
 
-  - label: ":bricks: Run Tests - Ads Packages :bookmark: :one:"
+  - label: ":databricks: Run Tests - Ads Packages :bookmark: :one:"
     key: "run_dbt_databricks_ads_1"
     plugins:
       - docker#v3.13.0:
@@ -133,7 +133,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift tiktok_ads twitter_ads amazon_ads reddit_ads
 
-  - label: ":bricks: Run Tests - Ads Packages :bookmark: :two:"
+  - label: ":databricks: Run Tests - Ads Packages :bookmark: :two:"
     key: "run_dbt_databricks_ads_2"
     plugins:
       - docker#v3.13.0:
@@ -210,7 +210,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift google_ads microsoft_ads pinterest
 
-  - label: ":bricks: Run Tests - Ads Packages :bookmark: :three:"
+  - label: ":databricks: Run Tests - Ads Packages :bookmark: :three:"
     key: "run_dbt_databricks_ads_3"
     plugins:
      - docker#v3.13.0:
@@ -287,7 +287,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift linkedin_pages facebook_pages instagram_business twitter_organic youtube_analytics
 
-  - label: ":bricks: Run Tests - Social Packages :iphone:"
+  - label: ":databricks: Run Tests - Social Packages :iphone:"
     key: "run_dbt_databricks_social"
     plugins:
       - docker#v3.13.0:
@@ -364,7 +364,7 @@ steps:
      commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift google_play apple_store
 
-  - label: ":bricks: Run Tests - App Packages :video_game:"
+  - label: ":databricks: Run Tests - App Packages :video_game:"
     key: "run_dbt_databricks_app"
     plugins:
       - docker#v3.13.0:
@@ -441,7 +441,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift salesforce hubspot
 
-  - label: ":bricks: Run Tests - CRM Packages :salesforce::hubspot:"
+  - label: ":databricks: Run Tests - CRM Packages :salesforce::hubspot:"
     key: "run_dbt_databricks_crm"
     plugins:
       - docker#v3.13.0:
@@ -518,7 +518,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift shopify recurly stripe zuora
 
-  - label: ":bricks: Run Tests - Ecommerce and Subscription Packages :shopping_bags:"
+  - label: ":databricks: Run Tests - Ecommerce and Subscription Packages :shopping_bags:"
     key: "run_dbt_databricks_esub"
     plugins:
       - docker#v3.13.0:
@@ -595,7 +595,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift marketo iterable klaviyo mailchimp pardot
 
-  - label: ":bricks: Run Tests - Email Marketing Packages :email:"
+  - label: ":databricks: Run Tests - Email Marketing Packages :email:"
     key: "run_dbt_databricks_email"
     plugins:
       - docker#v3.13.0:
@@ -673,7 +673,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift quickbooks xero sage_intacct netsuite
 
-  - label: ":bricks: Run Tests - Finance Packages :money_with_wings:"
+  - label: ":databricks: Run Tests - Finance Packages :money_with_wings:"
     key: "run_dbt_databricks_finance"
     plugins:
       - docker#v3.13.0:
@@ -751,7 +751,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift fivetran_log greenhouse lever
 
-  - label: ":bricks: Run Tests - Log and People Packages :people_hugging:"
+  - label: ":databricks: Run Tests - Log and People Packages :people_hugging:"
     key: "run_dbt_databricks_log_people"
     plugins:
       - docker#v3.13.0:
@@ -829,7 +829,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift amplitude mixpanel pendo
 
-  - label: ":bricks: Run Tests - Product Packages :unicorn_face:"
+  - label: ":databricks: Run Tests - Product Packages :unicorn_face:"
     key: "run_dbt_databricks_product"
     plugins:
       - docker#v3.13.0:
@@ -906,7 +906,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift intercom github zendesk asana jira
 
-  - label: ":bricks: Run Tests - Velocity Packages :roller_coaster:"
+  - label: ":databricks: Run Tests - Velocity Packages :roller_coaster:"
     key: "run_dbt_databricks_velocity"
     plugins:
       - docker#v3.13.0:
@@ -984,7 +984,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift ad_reporting
 
-  - label: ":bricks: Run Tests - Ad Reporting Roll Up Package :roller_skate:"
+  - label: ":databricks: Run Tests - Ad Reporting Roll Up Package :roller_skate:"
     key: "run_dbt_databricks_ad_roll"
     plugins:
       - docker#v3.13.0:
@@ -1061,7 +1061,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift app_reporting
 
-  - label: ":bricks: Run Tests - App Reporting Roll Up Package :roller_skate:"
+  - label: ":databricks: Run Tests - App Reporting Roll Up Package :roller_skate:"
     key: "run_dbt_databricks_app_roll"
     plugins:
       - docker#v3.13.0:
@@ -1138,7 +1138,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift social_media_reporting
 
-  - label: ":bricks: Run Tests - Social Reporting Roll Up Package :roller_skate:"
+  - label: ":databricks: Run Tests - Social Reporting Roll Up Package :roller_skate:"
     key: "run_dbt_databricks_social_roll"
     plugins:
       - docker#v3.13.0:
@@ -1215,7 +1215,7 @@ steps:
       commands: |
         bash .buildkite/scripts/run_standard_models.sh redshift shopify_holistic_reporting
 
-  - label: ":bricks: Run Tests - Shopify Holistic Reporting Roll Up Package :roller_skate:"
+  - label: ":databricks: Run Tests - Shopify Holistic Reporting Roll Up Package :roller_skate:"
     key: "run_dbt_databricks_shopify_holistic_roll"
     plugins:
       - docker#v3.13.0:
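For reference, a sketch of what the new Databricks branch renders — this is illustrative, not part of the patch; the column name `page_url` and the `utm_source` parameter are hypothetical:

```sql
-- Approximate rendering of
-- {{ fivetran_utils.extract_uri_parameter('page_url', 'utm_source') }}
-- on a Databricks target (hypothetical call, derived from the macro above):
nullif(regexp_extract(page_url, 'utm_source=([^&]+)', 1), '')
```

Spark's `regexp_extract` returns an empty string when the pattern finds no match, so the `nullif` wrapper turns an absent parameter into `NULL` rather than `''`.
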
From 61212f747d5e6abd55cb0077c856c4e45f764057 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Mon, 27 Nov 2023 17:07:28 -0600
Subject: [PATCH 4/8] update changelog & readme

---
 CHANGELOG.md                     |  5 +++++
 README.md                        | 12 ++++++++++++
 macros/extract_uri_parameter.sql | 20 --------------------
 macros/extract_url_parameter.sql | 20 ++++++++++++++++++++
 4 files changed, 37 insertions(+), 20 deletions(-)
 delete mode 100644 macros/extract_uri_parameter.sql
 create mode 100644 macros/extract_url_parameter.sql

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5330c49..4194eae 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,8 @@
+# dbt_fivetran_utils v0.4.9
+[PR #130](https://github.com/fivetran/dbt_fivetran_utils/pull/130) includes the following updates.
+## Feature Updates
+- Added macro `extract_url_parameter` to create special logic for Databricks instances not supported by `dbt_utils.get_url_parameter()`. The macro uses `dbt_utils.get_url_parameter()` for default, non-Databricks targets. See README for more details.
+
 # dbt_fivetran_utils v0.4.8
 [PR #127](https://github.com/fivetran/dbt_fivetran_utils/pull/127) includes the following updates.
 ## Feature Updates

diff --git a/README.md b/README.md
index 923ce1d..f70a7d8 100644
--- a/README.md
+++ b/README.md
@@ -59,6 +59,7 @@ dispatch:
   - [array\_agg (source)](#array_agg-source)
   - [ceiling (source)](#ceiling-source)
   - [first\_value (source)](#first_value-source)
+  - [extract\_url\_parameter (source)](#extract-url-parameter-source)
   - [json\_extract (source)](#json_extract-source)
   - [json\_parse (source)](#json_parse-source)
   - [max\_bool (source)](#max_bool-source)
@@ -186,7 +187,18 @@ This macro returns the value_expression for the first row in the current window
 * `order` (optional): The order of which you want to partition the window frame. The order argument by default is `asc`. If you wish to get the last_value, you may change the argument to `desc`.
 
 ----
+### extract_url_parameter ([source](macros/extract_url_parameter.sql))
+This macro extracts a url parameter from a column containing a url. It is an expansion of `dbt_utils.get_url_parameter()` to add support for Databricks SQL.
+**Usage:**
+```sql
+{{ fivetran_utils.extract_url_parameter(field="url_field", url_parameter="utm_source") }}
+```
+**Args:**
+* `field` (required): The name of the column containing the url.
+* `url_parameter` (required): The parameter you want to extract.
+
+----
 ### json_extract ([source](macros/json_extract.sql))
 This macro allows for cross database use of the json extract function. The json extract allows the return of data from a json object.
 The data is returned by the path you provide as the argument. The json_extract macro is compatible with BigQuery, Redshift, Postgres, and Snowflake.
 

diff --git a/macros/extract_uri_parameter.sql b/macros/extract_uri_parameter.sql
deleted file mode 100644
index 8e17c8b..0000000
--- a/macros/extract_uri_parameter.sql
+++ /dev/null
@@ -1,20 +0,0 @@
-{% macro extract_uri_parameter(field, uri_parameter) -%}
-
-{{ adapter.dispatch('extract_uri_parameter', 'fivetran_utils') (field, uri_parameter) }}
-
-{% endmacro %}
-
-
-{% macro default__extract_uri_parameter(field, uri_parameter) -%}
-
-{{ dbt_utils.get_url_parameter(field, uri_parameter) }}
-
-{%- endmacro %}
-
-
-{% macro databricks__extract_uri_parameter(field, uri_parameter) -%}
-
-{%- set formatted_uri_parameter = "'" + uri_parameter + "=([^&]+)'" -%}
-nullif(regexp_extract({{ field }}, {{ formatted_uri_parameter }}, 1), '')
-
-{%- endmacro %}
\ No newline at end of file
diff --git a/macros/extract_url_parameter.sql b/macros/extract_url_parameter.sql
new file mode 100644
index 0000000..a6421e8
--- /dev/null
+++ b/macros/extract_url_parameter.sql
@@ -0,0 +1,20 @@
+{% macro extract_url_parameter(field, url_parameter) -%}
+
+{{ adapter.dispatch('extract_url_parameter', 'fivetran_utils') (field, url_parameter) }}
+
+{% endmacro %}
+
+
+{% macro default__extract_url_parameter(field, url_parameter) -%}
+
+{{ dbt_utils.get_url_parameter(field, url_parameter) }}
+
+{%- endmacro %}
+
+
+{% macro databricks__extract_url_parameter(field, url_parameter) -%}
+
+{%- set formatted_url_parameter = "'" + url_parameter + "=([^&]+)'" -%}
+nullif(regexp_extract({{ field }}, {{ formatted_url_parameter }}, 1), '')
+
+{%- endmacro %}
\ No newline at end of file
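To see why the override exists, a rough side-by-side of what each branch of the renamed macro compiles to for `utm_source`. The default branch delegates to `dbt_utils.get_url_parameter()`, whose default implementation is split_part-based at the time of writing, so the exact SQL depends on the installed dbt_utils version and target adapter:

```sql
-- default__extract_url_parameter, via dbt_utils.get_url_parameter()
-- (approximate rendering for most warehouses; assumes the split_part-based
--  default implementation in current dbt_utils):
nullif(split_part(split_part(page_url, 'utm_source=', 2), '&', 1), '')

-- databricks__extract_url_parameter, regex-based:
nullif(regexp_extract(page_url, 'utm_source=([^&]+)', 1), '')
```
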
From 513fb443a5227ae8a78d93cdc46b44e09d0bf8ca Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Mon, 27 Nov 2023 17:53:45 -0600
Subject: [PATCH 5/8] update changelog & readme

---
 CHANGELOG.md |  2 +-
 README.md    | 30 +++++++++++++++---------------
 2 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4194eae..6f9e0f2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,7 +1,7 @@
 # dbt_fivetran_utils v0.4.9
 [PR #130](https://github.com/fivetran/dbt_fivetran_utils/pull/130) includes the following updates.
 ## Feature Updates
-- Added macro `extract_url_parameter` to create special logic for Databricks instances not supported by `dbt_utils.get_url_parameter()`. The macro uses `dbt_utils.get_url_parameter()` for default, non-Databricks targets. See README for more details.
+- Added macro `extract_url_parameter` to create special logic for Databricks instances not supported by `dbt_utils.get_url_parameter()`. The macro uses `dbt_utils.get_url_parameter()` for default, non-Databricks targets. See [README](https://github.com/fivetran/dbt_fivetran_utils/blob/releases/v0.4.latest/README.md#extract_url_parameter-source) for more details.
 
 # dbt_fivetran_utils v0.4.8
 [PR #127](https://github.com/fivetran/dbt_fivetran_utils/pull/127) includes the following updates.

diff --git a/README.md b/README.md
index f70a7d8..b8dd32e 100644
--- a/README.md
+++ b/README.md
@@ -58,8 +58,8 @@ dispatch:
 - [Cross-database compatibility](#cross-database-compatibility)
   - [array\_agg (source)](#array_agg-source)
   - [ceiling (source)](#ceiling-source)
+  - [extract\_url\_parameter (source)](#extract_url_parameter-source)
   - [first\_value (source)](#first_value-source)
-  - [extract\_url\_parameter (source)](#extract-url-parameter-source)
   - [json\_extract (source)](#json_extract-source)
   - [json\_parse (source)](#json_parse-source)
   - [max\_bool (source)](#max_bool-source)
@@ -172,20 +172,6 @@ than, or equal to, the specified numeric expression. The ceiling macro is compat
 **Args:**
 * `num` (required): The integer field you wish to apply the ceiling function.
 
-----
-### first_value ([source](macros/first_value.sql))
-This macro returns the value_expression for the first row in the current window frame with cross db functionality. This macro ignores null values. The default first_value calculation within the macro is the `first_value` function. The Redshift first_value calculation is the `first_value` function, with the inclusion of a frame_clause `{{ partition_field }} rows unbounded preceding`.
-
-**Usage:**
-```sql
-{{ fivetran_utils.first_value(first_value_field="created_at", partition_field="id", order_by_field="created_at", order="asc") }}
-```
-**Args:**
-* `first_value_field` (required): The value expression which you want to determine the first value for.
-* `partition_field` (required): Name of the field you want to partition by to determine the first_value.
-* `order_by_field` (required): Name of the field you wish to sort on to determine the first_value.
-* `order` (optional): The order of which you want to partition the window frame. The order argument by default is `asc`. If you wish to get the last_value, you may change the argument to `desc`.
-
 ----
 ### extract_url_parameter ([source](macros/extract_url_parameter.sql))
 This macro extracts a url parameter from a column containing a url. It is an expansion of `dbt_utils.get_url_parameter()` to add support for Databricks SQL.
@@ -211,6 +197,20 @@ The data is returned by the path you provide as the argument. The json_extract m
 * `string` (required): Name of the field which contains the json object.
 * `string_path` (required): Name of the path in the json object which you want to extract the data from.
 
+----
+### first_value ([source](macros/first_value.sql))
+This macro returns the value_expression for the first row in the current window frame with cross db functionality. This macro ignores null values. The default first_value calculation within the macro is the `first_value` function. The Redshift first_value calculation is the `first_value` function, with the inclusion of a frame_clause `{{ partition_field }} rows unbounded preceding`.
+
+**Usage:**
+```sql
+{{ fivetran_utils.first_value(first_value_field="created_at", partition_field="id", order_by_field="created_at", order="asc") }}
+```
+**Args:**
+* `first_value_field` (required): The value expression which you want to determine the first value for.
+* `partition_field` (required): Name of the field you want to partition by to determine the first_value.
+* `order_by_field` (required): Name of the field you wish to sort on to determine the first_value.
+* `order` (optional): The order of which you want to partition the window frame. The order argument by default is `asc`. If you wish to get the last_value, you may change the argument to `desc`.
+
 ----
 ### json_parse ([source](macros/json_parse.sql))
 This macro allows for cross database use of the json extract function, specifically used to parse and extract a nested value from a json object.

From 50927b8d96987bb27dc6a679086584a779889439 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Tue, 28 Nov 2023 16:12:59 -0600
Subject: [PATCH 6/8] update packages and dispatch

---
 integration_tests/packages.yml   | 4 ++--
 macros/extract_url_parameter.sql | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/integration_tests/packages.yml b/integration_tests/packages.yml
index 976ad3a..1f95659 100644
--- a/integration_tests/packages.yml
+++ b/integration_tests/packages.yml
@@ -15,7 +15,7 @@ packages:
     version: [">=0.1.0", "<1.0.0"]
 
   - package: fivetran/salesforce
-    version: [">=0.1.0", "<1.0.0"]
+    version: [">=1.0.0", "<2.0.0"]
 
   - package: fivetran/hubspot
     version: [">=0.1.0", "<1.0.0"]
@@ -51,7 +51,7 @@ packages:
     version: [">=0.1.0", "<1.0.0"]
 
   - package: fivetran/fivetran_log
-    version: [">=0.1.0", "<1.0.0"]
+    version: [">=1.0.0", "<2.0.0"]
   - package: fivetran/lever
     version: [">=0.1.0", "<1.0.0"]
   - package: fivetran/greenhouse

diff --git a/macros/extract_url_parameter.sql b/macros/extract_url_parameter.sql
index a6421e8..4597803 100644
--- a/macros/extract_url_parameter.sql
+++ b/macros/extract_url_parameter.sql
@@ -12,7 +12,7 @@
 {%- endmacro %}
 
 
-{% macro databricks__extract_url_parameter(field, url_parameter) -%}
+{% macro spark__extract_url_parameter(field, url_parameter) -%}
 
 {%- set formatted_url_parameter = "'" + url_parameter + "=([^&]+)'" -%}
 nullif(regexp_extract({{ field }}, {{ formatted_url_parameter }}, 1), '')
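The `databricks__` to `spark__` rename above works because dbt's `adapter.dispatch` walks the adapter inheritance chain: dbt-databricks is built on dbt-spark, so a `spark__` implementation is resolved for both Spark and Databricks targets. A hypothetical model to sanity-check the dispatch (model and column names are illustrative, not part of the patch):

```sql
-- models/url_parameter_check.sql (hypothetical)
select
    page_url,
    {{ fivetran_utils.extract_url_parameter(field='page_url', url_parameter='utm_source') }} as utm_source
from {{ ref('stg_page_views') }}

-- Expected compiled SQL on spark and databricks targets (via spark__...):
--   nullif(regexp_extract(page_url, 'utm_source=([^&]+)', 1), '')
-- All other targets fall through to default__extract_url_parameter,
-- which delegates to dbt_utils.get_url_parameter().
```
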
From 7b085b5a19aaad941b14f3163b0834002c178b79 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Tue, 28 Nov 2023 17:08:32 -0600
Subject: [PATCH 7/8] update run models

---
 .buildkite/scripts/run_standard_models.sh | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.buildkite/scripts/run_standard_models.sh b/.buildkite/scripts/run_standard_models.sh
index 3bce7b2..a80a71d 100644
--- a/.buildkite/scripts/run_standard_models.sh
+++ b/.buildkite/scripts/run_standard_models.sh
@@ -40,6 +40,9 @@ do
         perl -i -pe "s/(schema: |dataset: ).*/\1$value_to_replace/" ~/.dbt/profiles.yml
     elif [ "$package" = "social_media_reporting" ]; then
         perl -i -pe "s/(schema: |dataset: ).*/\1social_media_rollup_integration_tests/" ~/.dbt/profiles.yml
+    elif [ "$package" = "fivetran_log" ]; then
+        value_to_replace=$(grep "fivetran_platform_schema:" dbt_project.yml | awk '{ print $2 }')
+        perl -i -pe "s/(schema: |dataset: ).*/\1$value_to_replace/" ~/.dbt/profiles.yml
     else
         value_to_replace=$(grep ""$package"_schema:" dbt_project.yml | awk '{ print $2 }')
         perl -i -pe "s/(schema: |dataset: ).*/\1$value_to_replace/" ~/.dbt/profiles.yml

From d90bd911dfa9422768389abfffca4b260cc6c4d8 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Wed, 29 Nov 2023 10:44:11 -0600
Subject: [PATCH 8/8] update readme

---
 README.md | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/README.md b/README.md
index b8dd32e..cfa3c72 100644
--- a/README.md
+++ b/README.md
@@ -184,19 +184,6 @@ This macro extracts a url parameter from a column containing a url. It is an exp
 * `field` (required): The name of the column containing the url.
 * `url_parameter` (required): The parameter you want to extract.
 
-----
-### json_extract ([source](macros/json_extract.sql))
-This macro allows for cross database use of the json extract function. The json extract allows the return of data from a json object.
-The data is returned by the path you provide as the argument. The json_extract macro is compatible with BigQuery, Redshift, Postgres, and Snowflake.
-
-**Usage:**
-```sql
-{{ fivetran_utils.json_extract(string="value", string_path="in_business_hours") }}
-```
-**Args:**
-* `string` (required): Name of the field which contains the json object.
-* `string_path` (required): Name of the path in the json object which you want to extract the data from.
-
 ----
 ### first_value ([source](macros/first_value.sql))
 This macro returns the value_expression for the first row in the current window frame with cross db functionality. This macro ignores null values. The default first_value calculation within the macro is the `first_value` function. The Redshift first_value calculation is the `first_value` function, with the inclusion of a frame_clause `{{ partition_field }} rows unbounded preceding`.
@@ -211,6 +198,19 @@ This macro returns the value_expression for the first row in the current window
 * `order_by_field` (required): Name of the field you wish to sort on to determine the first_value.
 * `order` (optional): The order of which you want to partition the window frame. The order argument by default is `asc`. If you wish to get the last_value, you may change the argument to `desc`.
 
+----
+### json_extract ([source](macros/json_extract.sql))
+This macro allows for cross database use of the json extract function. The json extract allows the return of data from a json object.
+The data is returned by the path you provide as the argument. The json_extract macro is compatible with BigQuery, Redshift, Postgres, and Snowflake.
+
+**Usage:**
+```sql
+{{ fivetran_utils.json_extract(string="value", string_path="in_business_hours") }}
+```
+**Args:**
+* `string` (required): Name of the field which contains the json object.
+* `string_path` (required): Name of the path in the json object which you want to extract the data from.
+
 ----
 ### json_parse ([source](macros/json_parse.sql))
 This macro allows for cross database use of the json extract function, specifically used to parse and extract a nested value from a json object.
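
Taken together, the series leaves `fivetran_utils.extract_url_parameter` as the cross-database entry point. As a closing sketch, the values the `spark__` branch should yield for a few sample inputs (sample URLs and table name are illustrative; note the pattern is substring-based, so a hypothetical `last_utm_source=` parameter would also match `utm_source=` — the same trade-off the split-based upstream implementation makes):

```sql
-- Illustrative behavior of the spark__ branch for url_parameter='utm_source':
--   'https://shop.com/?utm_source=google&utm_medium=cpc' -> 'google'
--   'https://shop.com/?utm_medium=cpc'                   -> NULL (no match -> '' -> nullif -> NULL)
--   'https://shop.com/?utm_source='                      -> NULL (([^&]+) needs at least one character)
select
    page_url,
    nullif(regexp_extract(page_url, 'utm_source=([^&]+)', 1), '') as utm_source
from page_views
```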