-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path0affe012670f_dag.py
43 lines (35 loc) · 1.13 KB
/
0affe012670f_dag.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
from airflow import DAG
from airflow.providers.databricks.operators.databricks import DatabricksSubmitRunOperator, DatabricksRunNowOperator
from datetime import datetime, timedelta
# Parameters for the DatabricksSubmitRunOperator: the workspace notebook
# that the submitted run will execute.
notebook_task = {'notebook_path': '/Workspace/Users/[email protected]/mount_s3_bucket_to_databricks'}
# Parameters intended for a DatabricksRunNowOperator (imported above).
# NOTE(review): no task in the visible code consumes this dict — confirm
# whether it is still needed before removing.
notebook_params = dict(Variable=5)
# Task-level defaults applied to every task created inside the DAG.
default_args = dict(
    owner='0affe012670f',
    depends_on_past=False,   # each run is independent of the previous one
    email_on_failure=False,  # no email alerts on failure
    email_on_retry=False,    # no email alerts on retry
    retries=1,               # retry a failed task once
    retry_delay=timedelta(minutes=2),  # wait 2 minutes between attempts
)
# Daily DAG that submits the mount_s3_bucket_to_databricks notebook to an
# existing Databricks cluster via the Databricks provider.
with DAG(
    '0affe012670f_dag',
    # start_date must be a datetime; scheduling begins from here
    start_date=datetime(2024, 8, 4),
    # run once per day; `schedule_interval` is the pre-Airflow-2.4 spelling
    # of `schedule`, kept here for compatibility with older deployments
    schedule_interval='@daily',
    # do not backfill missed intervals between start_date and now
    catchup=False,
    default_args=default_args,
) as dag:
    opr_submit_run = DatabricksSubmitRunOperator(
        task_id='submit_run',
        # Airflow connection that stores the Databricks host and token
        databricks_conn_id='databricks_default',
        # run on an already-provisioned cluster instead of spinning one up
        existing_cluster_id='1108-162752-8okw8dgg',
        notebook_task=notebook_task,
    )
    # Single-task DAG: no dependencies to declare. The original trailing
    # bare `opr_submit_run` expression was a no-op statement and was removed.