fix: platform becomes pipeline dag #25

Merged 1 commit on Sep 23, 2024
@@ -1,4 +1,4 @@
"""Configuration class for the platform pipeline."""
"""Configuration class for the unified pipeline."""

from pathlib import Path
from typing import Any
@@ -7,24 +7,25 @@
 
 
 class PlatformConfig:
-    """Configuration class for the platform pipeline.
+    """Configuration class for the platform part of the unified pipeline.
 
-    This class reads the configuration files for the platform dag, PIS and ETL
-    applications, performs some operations on them and then exposes the values.
+    This class reads the configuration files for both the platform part of the
+    unified pipeline dag as well as PIS and ETL applications, performs some
+    operations on them and then exposes the values.
 
     Some fields in PIS and ETL application configuration files are replaced with
-    values from the platform dag configuration, which is the only one the user of
+    values from the pipeline dag configuration, which is the only one the user of
     the orchestrator has to modify to run the pipeline.
 
     The configuration files are expected to be in the same directory as this file.
     They are:
-    - `platform.yaml`: contains the general configuration for the pipeline.
+    - `unified_pipeline.yaml`: contains the general configuration for the pipeline.
     - `pis.yaml`: contains the configuration for the PIS steps.
     - `etl.conf`: contains the configuration for the ETL steps.
     """
 
     def __init__(self) -> None:
-        self.platform_config_path = Path(__file__).parent / "platform.yaml"
+        self.platform_config_path = Path(__file__).parent / "unified_pipeline.yaml"
         self.pis_config_local_path = Path(__file__).parent / "pis.yaml"
         self.etl_config_local_path = Path(__file__).parent / "etl.conf"
 
@@ -46,7 +47,7 @@ def __init__(self) -> None:
         self.service_account = "platform-input-support@open-targets-eu-dev.iam.gserviceaccount.com" #fmt: skip
         self.service_account_scopes = ["https://www.googleapis.com/auth/drive"]
 
-        # Platform pipeline settings.
+        # Pipeline settings.
         settings = read_yaml_config(self.platform_config_path)
         self.gcs_url = settings["gcs_url"]
         self.chembl_version = settings["chembl_version"]
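For reference, a minimal usage sketch of the renamed configuration module. It is not part of the diff; the import path, the no-argument constructor and the gcs_url and chembl_version attributes come from the changes above, everything else is illustrative.

from ot_orchestration.dags.config.unified_pipeline import PlatformConfig

# PlatformConfig takes no arguments and reads unified_pipeline.yaml, pis.yaml
# and etl.conf from the directory containing the module itself.
config = PlatformConfig()

# Values parsed from unified_pipeline.yaml are exposed as plain attributes.
print(config.gcs_url)
print(config.chembl_version)
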
@@ -1,4 +1,4 @@
"""DAG for the Open Targets Platform pipeline."""
"""DAG for the Open Targets unified pipeline."""

from datetime import datetime

@@ -15,7 +15,7 @@
 )
 from airflow.utils.edgemodifier import Label
 
-from ot_orchestration.dags.config.platform import PlatformConfig
+from ot_orchestration.dags.config.unified_pipeline import PlatformConfig
 from ot_orchestration.operators.dataproc import (
     PlatformETLCreateClusterOperator,
     PlatformETLSubmitJobOperator,
@@ -25,20 +25,20 @@
     UploadRemoteFileOperator,
     UploadStringOperator,
 )
-from ot_orchestration.operators.platform import PISDiffComputeOperator
+from ot_orchestration.operators.unified_pipeline import PISDiffComputeOperator
 from ot_orchestration.utils import clean_name, to_hocon, to_yaml
 from ot_orchestration.utils.common import (
     GCP_PROJECT_PLATFORM,
     GCP_REGION,
     GCP_ZONE,
-    platform_dag_kwargs,
     shared_dag_args,
+    unified_pipeline_dag_kwargs,
 )
 from ot_orchestration.utils.labels import Labels
 
 with DAG(
     default_args=shared_dag_args,
-    **platform_dag_kwargs,
+    **unified_pipeline_dag_kwargs,
     params={
         "run_label": Param(
             default=f"pis-{datetime.now().strftime('%Y%m%d-%H%M')}",
@@ -183,11 +183,14 @@ def etl_stage() -> None:
     r = etl_stage()
 
     d = DataprocDeleteClusterOperator(
-        task_id="cluster_delete",
+        task_id="etl_cluster_delete",
         project_id=GCP_PROJECT_PLATFORM,
         region=GCP_REGION,
         cluster_name=cluster_name,
+        trigger_rule="all_success",
     )
 
     chain(p, r, d)
 
+if __name__ == "__main__":
+    dag.test()
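Two details in the last hunk may be worth calling out. chain(p, r, d) is Airflow's helper for linear dependencies, equivalent to p >> r >> d, so the renamed etl_cluster_delete task runs only after the ETL stage; the now-explicit trigger_rule="all_success" is Airflow's default rule, presumably stated to make that intent visible. The new if __name__ == "__main__": dag.test() block lets the DAG file be executed directly for local debugging. A generic, self-contained sketch of the same pattern follows; the operator and DAG names here are illustrative, not taken from this repository.

import pendulum
from airflow import DAG
from airflow.models.baseoperator import chain
from airflow.operators.empty import EmptyOperator

with DAG(
    dag_id="chain_example",
    start_date=pendulum.datetime(2024, 9, 23, tz="UTC"),
    schedule=None,
) as dag:
    p = EmptyOperator(task_id="pis_stage")
    r = EmptyOperator(task_id="etl_stage")
    # trigger_rule="all_success" is already the default; naming it makes the intent explicit.
    d = EmptyOperator(task_id="etl_cluster_delete", trigger_rule="all_success")

    # Equivalent to p >> r >> d: each task waits for the previous one to succeed.
    chain(p, r, d)

if __name__ == "__main__":
    # Runs a single DAG run in-process, handy for local debugging.
    dag.test()
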
8 changes: 4 additions & 4 deletions src/ot_orchestration/utils/common.py
@@ -54,13 +54,13 @@
"catchup": False,
}

platform_dag_kwargs = {
"dag_id": "platform_pipeline",
"description": "Open Targets Platform",
unified_pipeline_dag_kwargs = {
"dag_id": "unified_pipeline",
"description": "Open Targets unified data generation pipeline",
"catchup": False,
"schedule": None,
"start_date": pendulum.now(tz="Europe/London").subtract(days=1),
"tags": ["platform", "experimental"],
"tags": [*shared_dag_kwargs["tags"], "platform", "unified_pipeline"],
"user_defined_filters": {"strhash": strhash},
}

Expand Down
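Beyond the rename, the tags entry now starts from shared_dag_kwargs["tags"] instead of a fully hard-coded list, drops the old experimental tag, and adds unified_pipeline. A small sketch of the merge pattern; the shared tag value below is made up, only the unpacking expression mirrors the diff.

# Illustrative only: the real shared_dag_kwargs lives in common.py and its
# tag values are not shown in this diff.
shared_dag_kwargs = {"tags": ["open-targets"]}

unified_pipeline_dag_kwargs = {
    "dag_id": "unified_pipeline",
    # Unpacking merges the shared tags with the DAG-specific ones.
    "tags": [*shared_dag_kwargs["tags"], "platform", "unified_pipeline"],
}

print(unified_pipeline_dag_kwargs["tags"])
# -> ['open-targets', 'platform', 'unified_pipeline']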