免费注册
帮助文档(华北一、二)

  • 详情请参考官网介绍

    请将如下代码放置于uhadoop-******-master2的/home/hadoop/airflow/dags目录下

    如无此目录，则需手动创建，并将其用户和用户组修改为 hadoop。

     
    cat tutorial.py 
    """ 
    Code that goes along with the Airflow tutorial located at: 
    https://github.com/airbnb/airflow/blob/master/airflow/example_dags/tutorial.py 
    """ 
    from airflow import DAG 
    from airflow.operators.bash_operator import BashOperator 
    from datetime import datetime, timedelta    
    # Defaults applied to every task in the DAG unless overridden on the
    # individual operator (e.g. t2 overrides `retries` below).
    default_args = dict(
        owner='airflow',
        depends_on_past=False,
        start_date=datetime(2015, 6, 1),
        email=['airflow@airflow.com'],
        email_on_failure=False,
        email_on_retry=False,
        retries=1,
        retry_delay=timedelta(minutes=5),
        # Other keys that could be set here:
        # queue='bash_queue',
        # pool='backfill',
        # priority_weight=10,
        # end_date=datetime(2016, 1, 1),
    )
    dag = DAG('tutorial', default_args=default_args) 
    # t1, t2 and t3 are examples of tasks created by instantiating operators.
    # t1 prints the current date using the shell `date` command.
    t1 = BashOperator(
        dag=dag,
        task_id='print_date',
        bash_command='date',
    )
    # t2 sleeps for five seconds; overrides the default and retries up to
    # 3 times on failure.
    t2 = BashOperator(
        dag=dag,
        task_id='sleep',
        bash_command='sleep 5',
        retries=3,
    )
    templated_command = """     
      {% for i in range(5) %}         
        echo "{{ ds }}"