dags_airflow_fede_rucio_k8soperator.py
"""
Create a pod that can access rucio endpoint at CNAF
- Using k8s operator
"""
from datetime import datetime

from airflow import DAG

# KubernetesPodOperator import path for Airflow 1.10; on Airflow 2 the operator
# is shipped by the cncf.kubernetes provider package instead of airflow.contrib.
from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
from kubernetes.client import models as k8s

default_args = {
    "owner": "airflow",
    "start_date": datetime(2020, 1, 1),
}
dag = DAG(
    "rucio_operator", default_args=default_args, tags=["fede"], schedule_interval=None
)

repo = "leggerf/rucio-intertwin"
tag = "0.0.0"

k = KubernetesPodOperator(
    namespace="airflow",
    image=f"{repo}:{tag}",  # image="ubuntu:16.04",
    image_pull_secrets=[k8s.V1LocalObjectReference("dockerhub")],
    image_pull_policy="Always",
    cmds=["./get-token.sh"],
    # cmds=["bash", "-cx"],
    # arguments=["pwd", "ls"],
    labels={"foo": "bar"},
    name="test-data-access",
    task_id="data-access",
    is_delete_operator_pod=True,  # delete pod after execution
    hostnetwork=False,
    startup_timeout_seconds=900,
    dag=dag,
)

k1 = KubernetesPodOperator(
    namespace="airflow",
    image=f"{repo}:{tag}",  # image="ubuntu:16.04",
    image_pull_secrets=[k8s.V1LocalObjectReference("dockerhub")],
    image_pull_policy="Always",
    cmds=["./get-token.sh"],
    # cmds=["bash", "-cx"],
    # arguments=["pwd", "ls"],
    labels={"foo": "bar"},
    name="test-data-access-1",
    task_id="data-access-1",
    is_delete_operator_pod=True,  # delete pod after execution
    hostnetwork=False,
    startup_timeout_seconds=900,
    dag=dag,
)

# define the DAG pipeline: run the two pod tasks sequentially
k >> k1
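
# A minimal illustrative sketch (not part of the original DAG) of how a further
# pod task could be appended to the pipeline. It assumes the token script can be
# configured through environment variables; RUCIO_ACCOUNT and its value below
# are hypothetical placeholders.
#
# k2 = KubernetesPodOperator(
#     namespace="airflow",
#     image=f"{repo}:{tag}",
#     image_pull_secrets=[k8s.V1LocalObjectReference("dockerhub")],
#     cmds=["./get-token.sh"],
#     env_vars={"RUCIO_ACCOUNT": "intertwin"},  # hypothetical account name
#     name="test-data-access-2",
#     task_id="data-access-2",
#     is_delete_operator_pod=True,
#     dag=dag,
# )
# k1 >> k2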