Airflow start_date and schedule_interval (start time)

1. After updating the dag_id in a DAG file (e.g. airflow.py under the dags folder), re-run airflow initdb.

Run the services in the background:

airflow webserver &
 
airflow scheduler &
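To keep both processes alive after the terminal closes, a common approach (not from the original post; the log paths are illustrative) is nohup with redirected output:

nohup airflow webserver > /opt/log/webserver.log 2>&1 &

nohup airflow scheduler > /opt/log/scheduler.log 2>&1 &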

2. Error: The scheduler does not appear to be running. Last heartbeat was received 3 d

Fix: re-run airflow scheduler.

start_date: the DAG's start time

Reference: https://blog.csdn.net/OldDirverHelpMe/article/details/106843857

When Airflow schedules a DAG, the first run is triggered at start_date + schedule_interval.
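A minimal sketch (the dag_id and dates below are illustrative, not from the original post): with this configuration the first run covers the interval starting at 2021-11-08 00:00 and is actually triggered one hour later, i.e. at start_date + schedule_interval.

from airflow import DAG
from datetime import datetime, timedelta

# Illustrative DAG: start_date = 2021-11-08 00:00, interval = 1 hour,
# so the first run is triggered at 2021-11-08 01:00.
example_dag = DAG(
    dag_id='example_interval',
    start_date=datetime(2021, 11, 8),
    schedule_interval=timedelta(hours=1),
)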

schedule_interval: the scheduling interval

cron:
Reference: https://blog.csdn.net/jsklnice/article/details/114375306

# ┌───────────── minute (0 - 59)
# │ ┌───────────── hour (0 - 23)
# │ │ ┌───────────── day of the month (1 - 31)
# │ │ │ ┌───────────── month (1 - 12)
# │ │ │ │ ┌───────────── day of the week (0 - 6) (Sunday to Saturday;
# │ │ │ │ │                                   7 is also Sunday on some systems)
# │ │ │ │ │
# │ │ │ │ │
# * * * * * <command to execute>
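As a small sketch (dag_id and start_date are illustrative), a DAG can also take a cron string as its schedule_interval; '0 * * * *' below means minute 0 of every hour:

from airflow import DAG
from datetime import datetime

# Illustrative DAG scheduled with a cron expression:
# '0 * * * *' = run at the top of every hour.
cron_dag = DAG(
    dag_id='example_cron',
    start_date=datetime(2021, 11, 8),
    schedule_interval='0 * * * *',
)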

timedelta:

from datetime import timedelta
timedelta(minutes=3)
timedelta(hours=3)
timedelta(days=3)
# coding: utf-8
import airflow
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from datetime import datetime, timedelta
 
# Start date: one hour ago, truncated to the whole hour
dt = datetime.now() - timedelta(hours=1)
 
# Default arguments
default_args = {
    'owner': 'fjk',  # owner name
    'depends_on_past': True,   # whether a run depends on the previous run of the same task
    'start_date': datetime(dt.year, dt.month, dt.day, dt.hour)
    #'start_date': airflow.utils.dates.days_ago(2),
}
 
# Define the DAG
dag = DAG(
    dag_id='20007_as_h',  # dag_id
    default_args=default_args,  # use the default arguments above
    #schedule_interval='*/5 * * * *',  # cron fields: minute, hour, day of month, month, day of week; this expression would run every 5 minutes
    schedule_interval=timedelta(hours=1)  # run every hour
)
 
"""
2.通过BashOperator定义执行bash命令的任务
"""
 
t1 = BashOperator(   # substitute the (start)/(end)/(ip) placeholders in the model text files
    task_id='task1',
    depends_on_past=True,
    bash_command='sed -ie "s/(start)/$(date -d "10 minute ago" +"%Y-%m-%d %H:00:00")/g" /opt/model/20007_tj_h_as.txt&&sed -i "s/(end)/$(date -d "10 minute ago" +"%Y-%m-%d %H:59:59")/g" /opt/model/20007_tj_h_as.txt&&sed -i "s/(ip)/172.21.1.237/g" /opt/model/20007_tj_h_as.txt&&sed -ie "s/(start)/$(date -d "12 hour ago" +"%Y-%m-%d %H:%M:00")/g" /opt/model/20007_yc_as_h.txt&&sed -i "s/(end)/$(date -d now +"%Y-%m-%d %H:%M:00")/g" /opt/model/20007_yc_as_h.txt&&sed -i "s/(ip)/172.21.1.237/g" /opt/model/20007_yc_as_h.txt',
    dag=dag
)
 
# Launch the statistics model
 
t2 = BashOperator(   # define a bash-command task with BashOperator
    task_id='task2',
    depends_on_past=True,
    bash_command='sh /topsec/spark-2.3.0-hadoop2.7/bin/spark-submit --jars /opt/spark-launcher.jar --class io.xknow.spark.ContainerOperatorLauncher spark-internal --context /opt/software/context.txt --operatorJarHome /user/patronus/operators/SPARK --process /opt/model/20007_tj_h_as.txt >> /opt/log/20007_tj_h_as.log 2>&1',
    dag=dag
)
 
# Launch the prediction model
t3 = BashOperator(   # define a bash-command task with BashOperator
    task_id='task3',
    depends_on_past=True,
    bash_command='sh /topsec/spark-2.3.0-hadoop2.7/bin/spark-submit --jars /opt/spark-launcher.jar --class io.xknow.spark.ContainerOperatorLauncher spark-internal --context /opt/software/context.txt --operatorJarHome /user/patronus/operators/SPARK --process /opt/model/20007_yc_as_h.txt >> /opt/log/20007_yc_as_h.log 2>&1',
    dag=dag
)
 
t4 = BashOperator(   # restore the original model text files from the .txte backups left by sed -ie
    task_id='task4',
    depends_on_past=True,
    bash_command='rm -rf /opt/model/20007_tj_h_as.txt&&mv /opt/model/20007_tj_h_as.txte /opt/model/20007_tj_h_as.txt&&rm -rf /opt/model/20007_yc_as_h.txt&&mv /opt/model/20007_yc_as_h.txte /opt/model/20007_yc_as_h.txt',
    dag=dag
)
 
t1 >> t2 >> t3 >> t4
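To exercise a single task without waiting for the schedule, Airflow 1.x offers the airflow test command (the execution date below is illustrative):

airflow test 20007_as_h task1 2021-11-08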

Source: https://blog.csdn.net/weixin_42357472/article/details/121229246