Clean up dynamic start_date values from docs (#19607)
josh-fell authored Nov 17, 2021
1 parent 355dec8 commit 26e4e11
Showing 9 changed files with 40 additions and 26 deletions.
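
The same pattern is applied throughout: a dynamic `start_date` such as `days_ago(2)` is re-evaluated on every parse of the DAG file, so the docs switch to a fixed `datetime` and set `catchup=False`. A minimal before/after sketch of that pattern (illustrative only; the `dag_id` values below are hypothetical and not taken from any file in this diff):

```python
from datetime import datetime

from airflow.models import DAG
from airflow.utils.dates import days_ago  # the dynamic helper this commit removes from the docs

# Before: the start date shifts forward every time the scheduler re-parses the file.
old_style = DAG(
    dag_id="example_dag_old_style",
    start_date=days_ago(2),
    schedule_interval="@daily",
)

# After: a fixed start date; catchup=False keeps Airflow from backfilling every
# interval since 2021-01-01 when the DAG is first enabled.
new_style = DAG(
    dag_id="example_dag_new_style",
    start_date=datetime(2021, 1, 1),
    schedule_interval="@daily",
    catchup=False,
)
```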
6 changes: 4 additions & 2 deletions UPDATING.md
@@ -170,8 +170,9 @@ Similarly, `DAG.concurrency` has been renamed to `DAG.max_active_tasks`.
 ```python
 dag = DAG(
     dag_id="example_dag",
+    start_date=datetime(2021, 1, 1),
+    catchup=False,
     concurrency=3,
-    start_date=days_ago(2),
 )
 ```

@@ -180,8 +181,9 @@ dag = DAG(
 ```python
 dag = DAG(
     dag_id="example_dag",
+    start_date=datetime(2021, 1, 1),
+    catchup=False,
     max_active_tasks=3,
-    start_date=days_ago(2),
 )
 ```

10 changes: 2 additions & 8 deletions airflow/smart_sensor_dags/smart_sensor_group.py
@@ -17,16 +17,11 @@
 # under the License.

 """Smart sensor DAGs managing all smart sensor tasks."""
-from datetime import timedelta
+from datetime import datetime, timedelta

 from airflow.configuration import conf
 from airflow.models import DAG
 from airflow.sensors.smart_sensor import SmartSensorOperator
-from airflow.utils.dates import days_ago
-
-args = {
-    'owner': 'airflow',
-}

 num_smart_sensor_shard = conf.getint("smart_sensor", "shards")
 shard_code_upper_limit = conf.getint('smart_sensor', 'shard_code_upper_limit')
@@ -38,13 +33,12 @@
     dag_id = f'smart_sensor_group_shard_{i}'
     dag = DAG(
         dag_id=dag_id,
-        default_args=args,
         schedule_interval=timedelta(minutes=5),
         max_active_tasks=1,
         max_active_runs=1,
         catchup=False,
         dagrun_timeout=timedelta(hours=24),
-        start_date=days_ago(2),
+        start_date=datetime(2021, 1, 1),
     )

     SmartSensorOperator(
7 changes: 6 additions & 1 deletion docs/apache-airflow/best-practices.rst
@@ -239,7 +239,12 @@ Then you can import and use the ``ALL_TASKS`` constant in all your DAGs like that

     from my_company_utils.common import ALL_TASKS

-    with DAG(dag_id="my_dag", schedule_interval=None, start_date=days_ago(2)) as dag:
+    with DAG(
+        dag_id="my_dag",
+        schedule_interval=None,
+        start_date=datetime(2021, 1, 1),
+        catchup=False,
+    ) as dag:
         for task in ALL_TASKS:
             # create your operators and relations here
             pass
8 changes: 5 additions & 3 deletions docs/apache-airflow/concepts/dags.rst
@@ -37,18 +37,20 @@ Declaring a DAG
 There are three ways to declare a DAG - either you can use a context manager,
 which will add the DAG to anything inside it implicitly::

-    with DAG("my_dag_name") as dag:
+    with DAG(
+        "my_dag_name", start_date=datetime(2021, 1, 1), schedule_interval="@daily", catchup=False
+    ) as dag:
         op = DummyOperator(task_id="task")

 Or, you can use a standard constructor, passing the dag into any
 operators you use::

-    my_dag = DAG("my_dag_name")
+    my_dag = DAG("my_dag_name", start_date=datetime(2021, 1, 1), schedule_interval="@daily", catchup=False)
     op = DummyOperator(task_id="task", dag=my_dag)

 Or, you can use the ``@dag`` decorator to :ref:`turn a function into a DAG generator <concepts:dag-decorator>`::

-    @dag(start_date=days_ago(2))
+    @dag(start_date=datetime(2021, 1, 1), schedule_interval="@daily", catchup=False)
     def generate_dag():
         op = DummyOperator(task_id="task")

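As an aside to the ``@dag`` snippet above (not part of this commit): the decorator only wraps the function, and the DAG object is created when the function is called. A minimal sketch, assuming Airflow 2.x, of how the docs' `generate_dag` example would be instantiated:

```python
from datetime import datetime

from airflow.decorators import dag
from airflow.operators.dummy import DummyOperator


@dag(start_date=datetime(2021, 1, 1), schedule_interval="@daily", catchup=False)
def generate_dag():
    op = DummyOperator(task_id="task")


# Calling the decorated function builds the DAG; keeping the result at module
# level lets Airflow's DAG file parser discover it.
dag_object = generate_dag()
```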
3 changes: 2 additions & 1 deletion docs/apache-airflow/concepts/operators.rst
@@ -175,7 +175,8 @@ you can pass ``render_template_as_native_obj=True`` to the DAG as follows:

     dag = DAG(
         dag_id="example_template_as_python_object",
         schedule_interval=None,
-        start_date=days_ago(2),
+        start_date=datetime(2021, 1, 1),
+        catchup=False,
         render_template_as_native_obj=True,
     )
10 changes: 8 additions & 2 deletions docs/apache-airflow/dag-run.rst
@@ -229,11 +229,17 @@ Example of a parameterized DAG:

 .. code-block:: python

+    from datetime import datetime
+
     from airflow import DAG
     from airflow.operators.bash import BashOperator
-    from airflow.utils.dates import days_ago

-    dag = DAG("example_parameterized_dag", schedule_interval=None, start_date=days_ago(2))
+    dag = DAG(
+        "example_parameterized_dag",
+        schedule_interval=None,
+        start_date=datetime(2021, 1, 1),
+        catchup=False,
+    )

     parameterized_task = BashOperator(
         task_id="parameterized_task",
9 changes: 4 additions & 5 deletions docs/apache-airflow/lineage.rst
@@ -30,22 +30,21 @@ works.

 .. code-block:: python

+    from datetime import datetime, timedelta
+
     from airflow.operators.bash import BashOperator
     from airflow.operators.dummy import DummyOperator
     from airflow.lineage import AUTO
     from airflow.lineage.entities import File
     from airflow.models import DAG
-    from airflow.utils.dates import days_ago
-    from datetime import timedelta

     FILE_CATEGORIES = ["CAT1", "CAT2", "CAT3"]

-    args = {"owner": "airflow", "start_date": days_ago(2)}
-
     dag = DAG(
         dag_id="example_lineage",
-        default_args=args,
+        start_date=datetime(2021, 1, 1),
         schedule_interval="0 0 * * *",
+        catchup=False,
         dagrun_timeout=timedelta(minutes=60),
     )

3 changes: 2 additions & 1 deletion docs/apache-airflow/tutorial.rst
@@ -487,7 +487,8 @@ Lets look at our DAG:

     @dag(
         schedule_interval="0 0 * * *",
-        start_date=datetime.today() - timedelta(days=2),
+        start_date=datetime(2021, 1, 1),
+        catchup=False,
         dagrun_timeout=timedelta(minutes=60),
     )
     def Etl():
@@ -21,15 +21,19 @@

 from airflow.models.dag import DAG
 from airflow.operators.dummy import DummyOperator
-from airflow.utils.dates import days_ago

 now = datetime.now()
 now_to_the_hour = (now - timedelta(0, 0, 0, 0, 0, 3)).replace(minute=0, second=0, microsecond=0)
 START_DATE = now_to_the_hour
 DAG_NAME = 'test_dag_v1'

-default_args = {'owner': 'airflow', 'depends_on_past': True, 'start_date': days_ago(2)}
-dag = DAG(DAG_NAME, schedule_interval='*/10 * * * *', default_args=default_args)
+dag = DAG(
+    DAG_NAME,
+    schedule_interval='*/10 * * * *',
+    default_args={'depends_on_past': True},
+    start_date=datetime(2021, 1, 1),
+    catchup=False,
+)

 run_this_1 = DummyOperator(task_id='run_this_1', dag=dag)
 run_this_2 = DummyOperator(task_id='run_this_2', dag=dag)
