From d04b80a1c6dd9bba1f671f01fc5014bd74ab6037 Mon Sep 17 00:00:00 2001
From: open-metadata
Date: Tue, 4 Feb 2025 09:21:52 +0000
Subject: [PATCH] See
 https://github.com/open-metadata/OpenMetadata/commit/e65f686ca9b95310ef5744cabce0d2cef54889e8
 from refs/heads/main

---
 .../dashboard/powerbi-report-server/index.md   |  2 +-
 .../dashboard/powerbi-report-server/yaml.md    |  2 +-
 .../connectors/database/deltalake/index.md     | 10 +++++-----
 .../connectors/database/deltalake/yaml.md      | 12 ++++++------
 .../connectors/database/singlestore/index.md   |  6 +++---
 .../connectors/database/singlestore/yaml.md    | 10 +++++-----
 .../v1.6.x/connectors/pipeline/nifi/index.md   | 16 ++++++++--------
 .../v1.6.x/connectors/pipeline/nifi/yaml.md    | 16 ++++++++--------
 .../dashboard/powerbi-report-server/index.md   |  2 +-
 .../dashboard/powerbi-report-server/yaml.md    |  2 +-
 .../connectors/database/deltalake/index.md     | 10 +++++-----
 .../connectors/database/deltalake/yaml.md      | 12 ++++++------
 .../connectors/database/singlestore/index.md   |  6 +++---
 .../connectors/database/singlestore/yaml.md    | 12 ++++++------
 .../connectors/pipeline/nifi/index.md          | 18 +++++++++---------
 .../connectors/pipeline/nifi/yaml.md           | 16 ++++++++--------
 16 files changed, 76 insertions(+), 76 deletions(-)

diff --git a/content/v1.6.x/connectors/dashboard/powerbi-report-server/index.md b/content/v1.6.x/connectors/dashboard/powerbi-report-server/index.md
index 7ba0d77b..c0fcb036 100644
--- a/content/v1.6.x/connectors/dashboard/powerbi-report-server/index.md
+++ b/content/v1.6.x/connectors/dashboard/powerbi-report-server/index.md
@@ -4,7 +4,7 @@ slug: /connectors/dashboard/powerbireportserver
 ---
 
 {% connectorDetailsHeader
-  name="PowerBIReportServer"
+  name="PowerBI Report Server"
   stage="BETA"
   platform="Collate"
   availableFeatures=["Dashboards"]
diff --git a/content/v1.6.x/connectors/dashboard/powerbi-report-server/yaml.md b/content/v1.6.x/connectors/dashboard/powerbi-report-server/yaml.md
index ddea91dd..887cbf82 100644
--- a/content/v1.6.x/connectors/dashboard/powerbi-report-server/yaml.md
+++ b/content/v1.6.x/connectors/dashboard/powerbi-report-server/yaml.md
@@ -4,7 +4,7 @@ slug: /connectors/dashboard/powerbireportserver/yaml
 ---
 
 {% connectorDetailsHeader
-  name="PowerBIReportServer"
+  name="PowerBI Report Server"
   stage="BETA"
   platform="Collate"
   availableFeatures=["Dashboards"]
diff --git a/content/v1.6.x/connectors/database/deltalake/index.md b/content/v1.6.x/connectors/database/deltalake/index.md
index 12e14f33..bca5b060 100644
--- a/content/v1.6.x/connectors/database/deltalake/index.md
+++ b/content/v1.6.x/connectors/database/deltalake/index.md
@@ -4,7 +4,7 @@ slug: /connectors/database/deltalake
 ---
 
 {% connectorDetailsHeader
-name="DeltaLake"
+name="Delta Lake"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Metadata", "dbt"]
@@ -12,9 +12,9 @@ unavailableFeatures=["Query Usage", "Data Profiler", "Data Quality", "Lineage",
 / %}
 
-In this section, we provide guides and references to use the Deltalake connector.
+In this section, we provide guides and references to use the Delta Lake connector.
 
-Configure and schedule Deltalake metadata and profiler workflows from the OpenMetadata UI:
+Configure and schedule Delta Lake metadata and profiler workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
@@ -25,10 +25,10 @@ Configure and schedule Deltalake metadata and profiler workflows from the OpenMe
 
 ## Requirements
 
-Deltalake requires to run with Python 3.8, 3.9 or 3.10. We do not yet support the Delta connector
+Delta Lake requires Python 3.8, 3.9 or 3.10. We do not yet support the Delta connector
 for Python 3.11
 
-The DeltaLake connector is able to extract the information from a **metastore** or directly from the **storage**.
+The Delta Lake connector can extract metadata from a **metastore** or directly from the **storage**.
 
 If extracting directly from the storage, some extra requirements are needed depending on the storage
 
diff --git a/content/v1.6.x/connectors/database/deltalake/yaml.md b/content/v1.6.x/connectors/database/deltalake/yaml.md
index b22ecd60..edbcad3f 100644
--- a/content/v1.6.x/connectors/database/deltalake/yaml.md
+++ b/content/v1.6.x/connectors/database/deltalake/yaml.md
@@ -4,16 +4,16 @@ slug: /connectors/database/deltalake/yaml
 ---
 
 {% connectorDetailsHeader
-name="DeltaLake"
+name="Delta Lake"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Metadata", "dbt"]
 unavailableFeatures=["Query Usage", "Data Profiler", "Data Quality", "Lineage", "Column-level Lineage", "Owners", "Tags", "Stored Procedures"]
 / %}
 
-In this section, we provide guides and references to use the Deltalake connector.
+In this section, we provide guides and references to use the Delta Lake connector.
 
-Configure and schedule Deltalake metadata and profiler workflows from the OpenMetadata UI:
+Configure and schedule Delta Lake metadata and profiler workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
@@ -23,14 +23,14 @@ Configure and schedule Deltalake metadata and profiler workflows from the OpenMe
 
 ## Requirements
 
-Deltalake requires to run with Python 3.8, 3.9 or 3.10. We do not yet support the Delta connector
+Delta Lake requires Python 3.8, 3.9 or 3.10. We do not yet support the Delta connector
 for Python 3.11
 
 ### Python Requirements
 
 {% partial file="/v1.6/connectors/python-requirements.md" /%}
 
-To run the Deltalake ingestion, you will need to install:
+To run the Delta Lake ingestion, you will need to install:
 
 - If extracting from a metastore
 
@@ -49,7 +49,7 @@ pip3 install "openmetadata-ingestion[deltalake-storage]"
 
 All connectors are defined as JSON Schemas.
 [Here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/deltaLakeConnection.json)
-you can find the structure to create a connection to Deltalake.
+you can find the structure to create a connection to Delta Lake.
 
 In order to create and run a Metadata Ingestion workflow, we will follow
 the steps to create a YAML configuration able to connect to the source,
diff --git a/content/v1.6.x/connectors/database/singlestore/index.md b/content/v1.6.x/connectors/database/singlestore/index.md
index 84fdab5d..6888cc62 100644
--- a/content/v1.6.x/connectors/database/singlestore/index.md
+++ b/content/v1.6.x/connectors/database/singlestore/index.md
@@ -4,16 +4,16 @@ slug: /connectors/database/singlestore
 ---
 
 {% connectorDetailsHeader
-name="Singlestore"
+name="SingleStore"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Metadata", "Data Profiler", "Data Quality", "View Lineage", "View Column-level Lineage", "dbt"]
 unavailableFeatures=["Query Usage", "Stored Procedures", "Owners", "Tags"]
 / %}
 
-In this section, we provide guides and references to use the Singlestore connector.
+In this section, we provide guides and references to use the SingleStore connector.
-Configure and schedule Singlestore metadata and profiler workflows from the OpenMetadata UI:
+Configure and schedule SingleStore metadata and profiler workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
diff --git a/content/v1.6.x/connectors/database/singlestore/yaml.md b/content/v1.6.x/connectors/database/singlestore/yaml.md
index ecdd0b2a..4544c703 100644
--- a/content/v1.6.x/connectors/database/singlestore/yaml.md
+++ b/content/v1.6.x/connectors/database/singlestore/yaml.md
@@ -4,7 +4,7 @@ slug: /connectors/database/singlestore/yaml
 ---
 
 {% connectorDetailsHeader
-name="Singlestore"
+name="SingleStore"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Metadata", "Data Profiler", "Data Quality", "View Lineage", "View Column-level Lineage", "dbt"]
@@ -12,9 +12,9 @@ unavailableFeatures=["Query Usage", "Stored Procedures", "Owners", "Tags"]
 / %}
 
-In this section, we provide guides and references to use the Singlestore connector.
+In this section, we provide guides and references to use the SingleStore connector.
 
-Configure and schedule Singlestore metadata and profiler workflows from the OpenMetadata UI:
+Configure and schedule SingleStore metadata and profiler workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
@@ -31,7 +31,7 @@ Configure and schedule Singlestore metadata and profiler workflows from the Open
 
 {% partial file="/v1.6/connectors/python-requirements.md" /%}
 
-To run the Singlestore ingestion, you will need to install:
+To run the SingleStore ingestion, you will need to install:
 
 ```bash
 pip3 install "openmetadata-ingestion[singlestore]"
 ```
@@ -52,7 +52,7 @@ The workflow is modeled around the following
 
 ### 1. Define the YAML Config
 
-This is a sample config for Singlestore:
+This is a sample config for SingleStore:
 
 {% codePreview %}
 
diff --git a/content/v1.6.x/connectors/pipeline/nifi/index.md b/content/v1.6.x/connectors/pipeline/nifi/index.md
index b44ac0fc..f56f8d21 100644
--- a/content/v1.6.x/connectors/pipeline/nifi/index.md
+++ b/content/v1.6.x/connectors/pipeline/nifi/index.md
@@ -4,16 +4,16 @@ slug: /connectors/pipeline/nifi
 ---
 
 {% connectorDetailsHeader
-name="Nifi"
+name="NiFi"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Pipelines"]
 unavailableFeatures=["Pipeline Status", "Owners", "Tags", "Lineage"]
 / %}
 
-In this section, we provide guides and references to use the Nifi connector.
+In this section, we provide guides and references to use the NiFi connector.
 
-Configure and schedule Nifi metadata workflows from the OpenMetadata UI:
+Configure and schedule NiFi metadata workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
@@ -23,11 +23,11 @@ Configure and schedule Nifi metadata workflows from the OpenMetadata UI:
 ## Requirements
 
 ### Metadata
-OpenMetadata supports 2 types of connection for the Nifi connector:
-- **basic authentication**: use username/password to authenticate to Nifi.
+OpenMetadata supports 2 types of connection for the NiFi connector:
+- **basic authentication**: use username/password to authenticate to NiFi.
 - **client certificate authentication**: use CA, client certificate and client key files to authenticate.
 
-The user should be able to send request to the Nifi API and access the `Resources` endpoint.
+The user should be able to send requests to the NiFi API and access the `Resources` endpoint.
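+
+As a quick sanity check (assuming NiFi's default API base path), the `Resources` endpoint is
+typically reachable at `{hostPort}/nifi-api/resources`; requesting that URL with the credentials
+you plan to configure is an easy way to confirm the user has the required access.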
 ## Metadata Ingestion
 
@@ -48,9 +48,9 @@ The user should be able to send request to the Nifi API and access the `Resource
 
 - **Host and Port**: Pipeline Service Management/UI URI. This should be specified as a string in the format 'hostname:port'.
-- **Nifi Config**: OpenMetadata supports username/password or client certificate authentication.
+- **NiFi Config**: OpenMetadata supports username/password or client certificate authentication.
 1. Basic Authentication
-  - Username: Username to connect to Nifi. This user should be able to send request to the Nifi API and access the `Resources` endpoint.
-  - Password: Password to connect to Nifi.
-  - Verify SSL: Whether SSL verification should be perform when authenticating.
+  - Username: Username to connect to NiFi. This user should be able to send requests to the NiFi API and access the `Resources` endpoint.
+  - Password: Password to connect to NiFi.
+  - Verify SSL: Whether SSL verification should be performed when authenticating.
 2. Client Certificate Authentication
diff --git a/content/v1.6.x/connectors/pipeline/nifi/yaml.md b/content/v1.6.x/connectors/pipeline/nifi/yaml.md
index 0029a978..57f7b70c 100644
--- a/content/v1.6.x/connectors/pipeline/nifi/yaml.md
+++ b/content/v1.6.x/connectors/pipeline/nifi/yaml.md
@@ -4,16 +4,16 @@ slug: /connectors/pipeline/nifi/yaml
 ---
 
 {% connectorDetailsHeader
-name="Nifi"
+name="NiFi"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Pipelines"]
 unavailableFeatures=["Pipeline Status", "Owners", "Tags", "Lineage"]
 / %}
 
-In this section, we provide guides and references to use the Nifi connector.
+In this section, we provide guides and references to use the NiFi connector.
 
-Configure and schedule Nifi metadata and profiler workflows from the OpenMetadata UI:
+Configure and schedule NiFi metadata and profiler workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
@@ -26,7 +26,7 @@ Configure and schedule Nifi metadata and profiler workflows from the OpenMetadat
 
 {% partial file="/v1.6/connectors/python-requirements.md" /%}
 
-To run the Nifi ingestion, you will need to install:
+To run the NiFi ingestion, you will need to install:
 
 ```bash
 pip3 install "openmetadata-ingestion[nifi]"
 ```
@@ -47,7 +47,7 @@ The workflow is modeled around the following
 
 ### 1. Define the YAML Config
 
-This is a sample config for Nifi:
+This is a sample config for NiFi:
 
 {% codePreview %}
 
@@ -58,10 +58,10 @@ This is a sample config for Nifi:
 {% codeInfo srNumber=1 %}
 
 **hostPort**: Pipeline Service Management UI URL
 **nifiConfig**: one of
 **1.** Using Basic authentication
-  - **username**: Username to connect to Nifi. This user should be able to send request to the Nifi API and access the `Resources` endpoint.
-  - **password**: Password to connect to Nifi.
-  - **verifySSL**: Whether SSL verification should be perform when authenticating.
+  - **username**: Username to connect to NiFi. This user should be able to send requests to the NiFi API and access the `Resources` endpoint.
+  - **password**: Password to connect to NiFi.
+  - **verifySSL**: Whether SSL verification should be performed when authenticating.
 **2.** Using client certificate authentication
-- **certificateAuthorityPath**: Path to the certificate authority (CA) file. This is the certificate used to store and issue your digital certificate. This is an optional parameter. If omitted SSL verification will be skipped; this can present some sever security issue.
+- **certificateAuthorityPath**: Path to the certificate authority (CA) file. This is the certificate used to store and issue your digital certificate. This is an optional parameter. If omitted, SSL verification will be skipped; this can present a severe security issue.
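+
+As a rough orientation, a client-certificate `nifiConfig` block could look like the sketch
+below. All values are placeholders, and the client certificate and key field names
+(`clientCertificatePath`, `clientkeyPath`) are assumed to match OpenMetadata's
+`nifiConnection.json` schema:
+
+```yaml
+source:
+  type: nifi
+  serviceName: local_nifi
+  serviceConnection:
+    config:
+      type: Nifi
+      hostPort: https://localhost:8443
+      nifiConfig:
+        # Client certificate authentication; all three paths are placeholders
+        certificateAuthorityPath: /path/to/ca.pem
+        clientCertificatePath: /path/to/client-cert.pem
+        clientkeyPath: /path/to/client-key.pem
+```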
diff --git a/content/v1.7.x-SNAPSHOT/connectors/dashboard/powerbi-report-server/index.md b/content/v1.7.x-SNAPSHOT/connectors/dashboard/powerbi-report-server/index.md
index 92be3f90..6a5acb69 100644
--- a/content/v1.7.x-SNAPSHOT/connectors/dashboard/powerbi-report-server/index.md
+++ b/content/v1.7.x-SNAPSHOT/connectors/dashboard/powerbi-report-server/index.md
@@ -4,7 +4,7 @@ slug: /connectors/dashboard/powerbireportserver
 ---
 
 {% connectorDetailsHeader
-  name="PowerBIReportServer"
+  name="PowerBI Report Server"
   stage="BETA"
   platform="Collate"
   availableFeatures=["Dashboards"]
diff --git a/content/v1.7.x-SNAPSHOT/connectors/dashboard/powerbi-report-server/yaml.md b/content/v1.7.x-SNAPSHOT/connectors/dashboard/powerbi-report-server/yaml.md
index e4e3cd2d..a8bda2b2 100644
--- a/content/v1.7.x-SNAPSHOT/connectors/dashboard/powerbi-report-server/yaml.md
+++ b/content/v1.7.x-SNAPSHOT/connectors/dashboard/powerbi-report-server/yaml.md
@@ -4,7 +4,7 @@ slug: /connectors/dashboard/powerbireportserver/yaml
 ---
 
 {% connectorDetailsHeader
-  name="PowerBIReportServer"
+  name="PowerBI Report Server"
   stage="BETA"
   platform="Collate"
   availableFeatures=["Dashboards"]
diff --git a/content/v1.7.x-SNAPSHOT/connectors/database/deltalake/index.md b/content/v1.7.x-SNAPSHOT/connectors/database/deltalake/index.md
index 8c57371b..dc660dc8 100644
--- a/content/v1.7.x-SNAPSHOT/connectors/database/deltalake/index.md
+++ b/content/v1.7.x-SNAPSHOT/connectors/database/deltalake/index.md
@@ -4,7 +4,7 @@ slug: /connectors/database/deltalake
 ---
 
 {% connectorDetailsHeader
-name="DeltaLake"
+name="Delta Lake"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Metadata", "dbt"]
@@ -12,9 +12,9 @@ unavailableFeatures=["Query Usage", "Data Profiler", "Data Quality", "Lineage",
 / %}
 
-In this section, we provide guides and references to use the Deltalake connector.
+In this section, we provide guides and references to use the Delta Lake connector.
 
-Configure and schedule Deltalake metadata and profiler workflows from the OpenMetadata UI:
+Configure and schedule Delta Lake metadata and profiler workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
@@ -25,10 +25,10 @@ Configure and schedule Deltalake metadata and profiler workflows from the OpenMe
 
 ## Requirements
 
-Deltalake requires to run with Python 3.8, 3.9 or 3.10. We do not yet support the Delta connector
+Delta Lake requires Python 3.8, 3.9 or 3.10. We do not yet support the Delta connector
 for Python 3.11
 
-The DeltaLake connector is able to extract the information from a **metastore** or directly from the **storage**.
+The Delta Lake connector can extract metadata from a **metastore** or directly from the **storage**.
 
 If extracting directly from the storage, some extra requirements are needed depending on the storage
 
diff --git a/content/v1.7.x-SNAPSHOT/connectors/database/deltalake/yaml.md b/content/v1.7.x-SNAPSHOT/connectors/database/deltalake/yaml.md
index 9adf1bb8..bdfb118e 100644
--- a/content/v1.7.x-SNAPSHOT/connectors/database/deltalake/yaml.md
+++ b/content/v1.7.x-SNAPSHOT/connectors/database/deltalake/yaml.md
@@ -4,16 +4,16 @@ slug: /connectors/database/deltalake/yaml
 ---
 
 {% connectorDetailsHeader
-name="DeltaLake"
+name="Delta Lake"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Metadata", "dbt"]
 unavailableFeatures=["Query Usage", "Data Profiler", "Data Quality", "Lineage", "Column-level Lineage", "Owners", "Tags", "Stored Procedures"]
 / %}
 
-In this section, we provide guides and references to use the Deltalake connector.
+In this section, we provide guides and references to use the Delta Lake connector.
 
-Configure and schedule Deltalake metadata and profiler workflows from the OpenMetadata UI:
+Configure and schedule Delta Lake metadata and profiler workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
@@ -23,14 +23,14 @@ Configure and schedule Deltalake metadata and profiler workflows from the OpenMe
 
 ## Requirements
 
-Deltalake requires to run with Python 3.8, 3.9 or 3.10. We do not yet support the Delta connector
+Delta Lake requires Python 3.8, 3.9 or 3.10. We do not yet support the Delta connector
 for Python 3.11
 
 ### Python Requirements
 
 {% partial file="/v1.7/connectors/python-requirements.md" /%}
 
-To run the Deltalake ingestion, you will need to install:
+To run the Delta Lake ingestion, you will need to install:
 
 - If extracting from a metastore
 
@@ -49,7 +49,7 @@ pip3 install "openmetadata-ingestion[deltalake-storage]"
 
 All connectors are defined as JSON Schemas.
 [Here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/deltaLakeConnection.json)
-you can find the structure to create a connection to Deltalake.
+you can find the structure to create a connection to Delta Lake.
 
 In order to create and run a Metadata Ingestion workflow, we will follow
 the steps to create a YAML configuration able to connect to the source,
diff --git a/content/v1.7.x-SNAPSHOT/connectors/database/singlestore/index.md b/content/v1.7.x-SNAPSHOT/connectors/database/singlestore/index.md
index db1ed8ca..cc5fa2d1 100644
--- a/content/v1.7.x-SNAPSHOT/connectors/database/singlestore/index.md
+++ b/content/v1.7.x-SNAPSHOT/connectors/database/singlestore/index.md
@@ -4,16 +4,16 @@ slug: /connectors/database/singlestore
 ---
 
 {% connectorDetailsHeader
-name="Singlestore"
+name="SingleStore"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Metadata", "Data Profiler", "Data Quality", "View Lineage", "View Column-level Lineage", "dbt"]
 unavailableFeatures=["Query Usage", "Stored Procedures", "Owners", "Tags"]
 / %}
 
-In this section, we provide guides and references to use the Singlestore connector.
+In this section, we provide guides and references to use the SingleStore connector.
-Configure and schedule Singlestore metadata and profiler workflows from the OpenMetadata UI:
+Configure and schedule SingleStore metadata and profiler workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
diff --git a/content/v1.7.x-SNAPSHOT/connectors/database/singlestore/yaml.md b/content/v1.7.x-SNAPSHOT/connectors/database/singlestore/yaml.md
index 930c976d..5c1c8d65 100644
--- a/content/v1.7.x-SNAPSHOT/connectors/database/singlestore/yaml.md
+++ b/content/v1.7.x-SNAPSHOT/connectors/database/singlestore/yaml.md
@@ -4,7 +4,7 @@ slug: /connectors/database/singlestore/yaml
 ---
 
 {% connectorDetailsHeader
-name="Singlestore"
+name="SingleStore"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Metadata", "Data Profiler", "Data Quality", "View Lineage", "View Column-level Lineage", "dbt"]
@@ -12,9 +12,9 @@ unavailableFeatures=["Query Usage", "Stored Procedures", "Owners", "Tags"]
 / %}
 
-In this section, we provide guides and references to use the Singlestore connector.
+In this section, we provide guides and references to use the SingleStore connector.
 
-Configure and schedule Singlestore metadata and profiler workflows from the OpenMetadata UI:
+Configure and schedule SingleStore metadata and profiler workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
@@ -31,7 +31,7 @@ Configure and schedule Singlestore metadata and profiler workflows from the Open
 
 {% partial file="/v1.7/connectors/python-requirements.md" /%}
 
-To run the Singlestore ingestion, you will need to install:
+To run the SingleStore ingestion, you will need to install:
 
 ```bash
 pip3 install "openmetadata-ingestion[singlestore]"
 ```
@@ -41,7 +41,7 @@ pip3 install "openmetadata-ingestion[singlestore]"
 
 All connectors are defined as JSON Schemas.
 [Here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/database/singleStoreConnection.json)
-you can find the structure to create a connection to Singlestore.
+you can find the structure to create a connection to SingleStore.
 
 In order to create and run a Metadata Ingestion workflow, we will follow
 the steps to create a YAML configuration able to connect to the source,
@@ -52,7 +52,7 @@ The workflow is modeled around the following
 
 ### 1. Define the YAML Config
 
-This is a sample config for Singlestore:
+This is a sample config for SingleStore:
 
 {% codePreview %}
 
diff --git a/content/v1.7.x-SNAPSHOT/connectors/pipeline/nifi/index.md b/content/v1.7.x-SNAPSHOT/connectors/pipeline/nifi/index.md
index d4452ca2..a0f03c39 100644
--- a/content/v1.7.x-SNAPSHOT/connectors/pipeline/nifi/index.md
+++ b/content/v1.7.x-SNAPSHOT/connectors/pipeline/nifi/index.md
@@ -4,16 +4,16 @@ slug: /connectors/pipeline/nifi
 ---
 
 {% connectorDetailsHeader
-name="Nifi"
+name="NiFi"
 stage="PROD"
 platform="OpenMetadata"
 availableFeatures=["Pipelines"]
 unavailableFeatures=["Pipeline Status", "Owners", "Tags", "Lineage"]
 / %}
 
-In this section, we provide guides and references to use the Nifi connector.
+In this section, we provide guides and references to use the NiFi connector.
-Configure and schedule Nifi metadata workflows from the OpenMetadata UI:
+Configure and schedule NiFi metadata workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
@@ -23,11 +23,11 @@ Configure and schedule Nifi metadata workflows from the OpenMetadata UI:
 ## Requirements
 
 ### Metadata
-OpenMetadata supports 2 types of connection for the Nifi connector:
-- **basic authentication**: use username/password to authenticate to Nifi.
+OpenMetadata supports 2 types of connection for the NiFi connector:
+- **basic authentication**: use username/password to authenticate to NiFi.
 - **client certificate authentication**: use CA, client certificate and client key files to authenticate.
 
-The user should be able to send request to the Nifi API and access the `Resources` endpoint.
+The user should be able to send requests to the NiFi API and access the `Resources` endpoint.
 
 ## Metadata Ingestion
 
@@ -48,10 +48,10 @@ The user should be able to send request to the Nifi API and access the `Resource
 
 - **Host and Port**: Pipeline Service Management/UI URI. This should be specified as a string in the format 'hostname:port'.
-- **Nifi Config**: OpenMetadata supports username/password or client certificate authentication.
+- **NiFi Config**: OpenMetadata supports username/password or client certificate authentication.
 1. Basic Authentication
-  - Username: Username to connect to Nifi. This user should be able to send request to the Nifi API and access the `Resources` endpoint.
-  - Password: Password to connect to Nifi.
-  - Verify SSL: Whether SSL verification should be perform when authenticating.
+  - Username: Username to connect to NiFi. This user should be able to send requests to the NiFi API and access the `Resources` endpoint.
+  - Password: Password to connect to NiFi.
+  - Verify SSL: Whether SSL verification should be performed when authenticating.
 2. Client Certificate Authentication
-  - Certificate Authority Path: Path to the certificate authority (CA) file. This is the certificate used to store and issue your digital certificate. This is an optional parameter. If omitted SSL verification will be skipped; this can present some sever security issue.
+  - Certificate Authority Path: Path to the certificate authority (CA) file. This is the certificate used to store and issue your digital certificate. This is an optional parameter. If omitted, SSL verification will be skipped; this can present a severe security issue.
diff --git a/content/v1.7.x-SNAPSHOT/connectors/pipeline/nifi/yaml.md b/content/v1.7.x-SNAPSHOT/connectors/pipeline/nifi/yaml.md
index a236127f..56444077 100644
--- a/content/v1.7.x-SNAPSHOT/connectors/pipeline/nifi/yaml.md
+++ b/content/v1.7.x-SNAPSHOT/connectors/pipeline/nifi/yaml.md
@@ -11,9 +11,9 @@ availableFeatures=["Pipelines"]
 unavailableFeatures=["Pipeline Status", "Owners", "Tags", "Lineage"]
 / %}
 
-In this section, we provide guides and references to use the Nifi connector.
+In this section, we provide guides and references to use the NiFi connector.
 
-Configure and schedule Nifi metadata and profiler workflows from the OpenMetadata UI:
+Configure and schedule NiFi metadata and profiler workflows from the OpenMetadata UI:
 
 - [Requirements](#requirements)
 - [Metadata Ingestion](#metadata-ingestion)
@@ -26,7 +26,7 @@ Configure and schedule Nifi metadata and profiler workflows from the OpenMetadat
 
 {% partial file="/v1.7/connectors/python-requirements.md" /%}
 
-To run the Nifi ingestion, you will need to install:
+To run the NiFi ingestion, you will need to install:
 
 ```bash
 pip3 install "openmetadata-ingestion[nifi]"
 ```
@@ -36,7 +36,7 @@ pip3 install "openmetadata-ingestion[nifi]"
 
 All connectors are defined as JSON Schemas.
 [Here](https://github.com/open-metadata/OpenMetadata/blob/main/openmetadata-spec/src/main/resources/json/schema/entity/services/connections/pipeline/nifiConnection.json)
-you can find the structure to create a connection to Nifi.
+you can find the structure to create a connection to NiFi.
 
 In order to create and run a Metadata Ingestion workflow, we will follow
 the steps to create a YAML configuration able to connect to the source,
@@ -47,7 +47,7 @@ The workflow is modeled around the following
 
 ### 1. Define the YAML Config
 
-This is a sample config for Nifi:
+This is a sample config for NiFi:
 
 {% codePreview %}
 
@@ -58,10 +58,10 @@ This is a sample config for Nifi:
 {% codeInfo srNumber=1 %}
 
 **hostPort**: Pipeline Service Management UI URL
 **nifiConfig**: one of
 **1.** Using Basic authentication
-  - **username**: Username to connect to Nifi. This user should be able to send request to the Nifi API and access the `Resources` endpoint.
-  - **password**: Password to connect to Nifi.
-  - **verifySSL**: Whether SSL verification should be perform when authenticating.
+  - **username**: Username to connect to NiFi. This user should be able to send requests to the NiFi API and access the `Resources` endpoint.
+  - **password**: Password to connect to NiFi.
+  - **verifySSL**: Whether SSL verification should be performed when authenticating.
 **2.** Using client certificate authentication
-- **certificateAuthorityPath**: Path to the certificate authority (CA) file. This is the certificate used to store and issue your digital certificate. This is an optional parameter. If omitted SSL verification will be skipped; this can present some sever security issue.
+- **certificateAuthorityPath**: Path to the certificate authority (CA) file. This is the certificate used to store and issue your digital certificate. This is an optional parameter. If omitted, SSL verification will be skipped; this can present a severe security issue.
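+
+As a rough orientation, a client-certificate `nifiConfig` block could look like the
+following sketch. All values are placeholders, and the client certificate and key field
+names (`clientCertificatePath`, `clientkeyPath`) are assumed to match the
+`nifiConnection.json` schema linked above:
+
+```yaml
+source:
+  type: nifi
+  serviceName: local_nifi
+  serviceConnection:
+    config:
+      type: Nifi
+      hostPort: https://localhost:8443
+      nifiConfig:
+        # Client certificate authentication; all three paths are placeholders
+        certificateAuthorityPath: /path/to/ca.pem
+        clientCertificatePath: /path/to/client-cert.pem
+        clientkeyPath: /path/to/client-key.pem
+```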