Fix example import tests after move of providers to new structure (#4…
jscheffl authored Jan 28, 2025
1 parent 4db60c1 commit 085084b
Showing 12 changed files with 41 additions and 30 deletions.
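
The change follows a single pattern across the example DAGs: the shared system-test utilities are now imported through the top-level providers package instead of airflow.providers, while imports of the operators themselves are unchanged. A minimal sketch of the new layout, assuming the Airflow source tree with the restructured providers directory is on the Python path:

# Old layout (removed in this commit):
# from airflow.providers.apache.beam.tests.system.apache.beam.utils import GCS_INPUT, START_DATE

# New layout: test utilities come from the top-level "providers" package ...
from providers.apache.beam.tests.system.apache.beam.utils import GCS_INPUT, START_DATE

# ... while runtime imports of the installed provider packages stay the same.
from airflow import models
from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator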

@@ -21,14 +21,15 @@
 
 from __future__ import annotations
 
-from airflow import models
-from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator
-from airflow.providers.apache.beam.tests.system.apache.beam.utils import (
+from providers.apache.beam.tests.system.apache.beam.utils import (
     GCS_INPUT,
     GCS_JAR_DIRECT_RUNNER_BUCKET_NAME,
     GCS_JAR_DIRECT_RUNNER_OBJECT_NAME,
     START_DATE,
 )
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator
 from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
 
 with models.DAG(

@@ -21,14 +21,15 @@
 
 from __future__ import annotations
 
-from airflow import models
-from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator
-from airflow.providers.apache.beam.tests.system.apache.beam.utils import (
+from providers.apache.beam.tests.system.apache.beam.utils import (
     GCS_INPUT,
     GCS_JAR_FLINK_RUNNER_BUCKET_NAME,
     GCS_JAR_FLINK_RUNNER_OBJECT_NAME,
     START_DATE,
 )
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator
 from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
 
 with models.DAG(

@@ -21,14 +21,15 @@
 
 from __future__ import annotations
 
-from airflow import models
-from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator
-from airflow.providers.apache.beam.tests.system.apache.beam.utils import (
+from providers.apache.beam.tests.system.apache.beam.utils import (
     GCS_INPUT,
     GCS_JAR_SPARK_RUNNER_BUCKET_NAME,
     GCS_JAR_SPARK_RUNNER_OBJECT_NAME,
     START_DATE,
 )
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator
 from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
 
 with models.DAG(

7 changes: 4 additions & 3 deletions providers/apache/beam/tests/system/apache/beam/example_go.py
@@ -21,9 +21,7 @@
 
 from __future__ import annotations
 
-from airflow import models
-from airflow.providers.apache.beam.operators.beam import BeamRunGoPipelineOperator
-from airflow.providers.apache.beam.tests.system.apache.beam.utils import (
+from providers.apache.beam.tests.system.apache.beam.utils import (
     DEFAULT_ARGS,
     GCP_PROJECT_ID,
     GCS_GO,
@@ -32,6 +30,9 @@
     GCS_TMP,
     START_DATE,
 )
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import BeamRunGoPipelineOperator
 from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration
 
 with models.DAG(

@@ -21,9 +21,7 @@
 
 from __future__ import annotations
 
-from airflow import models
-from airflow.providers.apache.beam.operators.beam import BeamRunGoPipelineOperator
-from airflow.providers.apache.beam.tests.system.apache.beam.utils import (
+from providers.apache.beam.tests.system.apache.beam.utils import (
     DEFAULT_ARGS,
     GCP_PROJECT_ID,
     GCS_GO_DATAFLOW_ASYNC,
@@ -32,6 +30,9 @@
     GCS_TMP,
     START_DATE,
 )
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import BeamRunGoPipelineOperator
 from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus
 from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration
 from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor

@@ -21,16 +21,17 @@
 
 from __future__ import annotations
 
-from airflow import models
-from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator
-from airflow.providers.apache.beam.tests.system.apache.beam.utils import (
+from providers.apache.beam.tests.system.apache.beam.utils import (
     GCS_JAR_DATAFLOW_RUNNER_BUCKET_NAME,
     GCS_JAR_DATAFLOW_RUNNER_OBJECT_NAME,
     GCS_OUTPUT,
     GCS_STAGING,
     GCS_TMP,
     START_DATE,
 )
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator
 from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
 
 with models.DAG(

@@ -21,9 +21,7 @@
 
 from __future__ import annotations
 
-from airflow import models
-from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator
-from airflow.providers.apache.beam.tests.system.apache.beam.utils import (
+from providers.apache.beam.tests.system.apache.beam.utils import (
     DEFAULT_ARGS,
     GCP_PROJECT_ID,
     GCS_OUTPUT,
@@ -32,6 +30,9 @@
     GCS_TMP,
     START_DATE,
 )
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator
 from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration
 
 with models.DAG(

@@ -21,9 +21,7 @@
 
 from __future__ import annotations
 
-from airflow import models
-from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator
-from airflow.providers.apache.beam.tests.system.apache.beam.utils import (
+from providers.apache.beam.tests.system.apache.beam.utils import (
     DEFAULT_ARGS,
     GCP_PROJECT_ID,
     GCS_OUTPUT,
@@ -32,6 +30,9 @@
     GCS_TMP,
     START_DATE,
 )
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator
 from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration
 
 with models.DAG(

@@ -21,9 +21,7 @@
 
 from __future__ import annotations
 
-from airflow import models
-from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator
-from airflow.providers.apache.beam.tests.system.apache.beam.utils import (
+from providers.apache.beam.tests.system.apache.beam.utils import (
     DEFAULT_ARGS,
     GCP_PROJECT_ID,
     GCS_OUTPUT,
@@ -32,6 +30,9 @@
     GCS_TMP,
     START_DATE,
 )
+
+from airflow import models
+from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator
 from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus
 from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration
 from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor

@@ -19,8 +19,9 @@
 import os
 from datetime import datetime
 
+from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator
+
 from airflow import DAG
-from airflow.providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator
 from airflow.providers.standard.operators.python import PythonOperator
 
 

@@ -19,9 +19,10 @@
 import os
 from datetime import datetime, timedelta
 
+from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator
+
 from airflow import DAG
 from airflow.models import Variable
-from airflow.providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator
 from airflow.providers.standard.operators.python import PythonOperator
 from airflow.providers.standard.sensors.time_delta import TimeDeltaSensor
 

@@ -29,8 +29,8 @@
 from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
 from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator
 from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
-from airflow.providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator
 from airflow.utils.trigger_rule import TriggerRule
+from providers.openlineage.tests.system.openlineage.operator import OpenLineageTestOperator
 
 from providers.tests.system.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
