#
#      EXAMPLE 1
#

# The definitions of success_finish_operator and the download tasks are
# truncated above this snippet.
success_finish_operator.set_upstream([cli_download_sra, url_download, download_local_operator, copy_from_biowardrobe])


#
#  STEP: FINISH WITH ERROR
#
# Writes the failure message from whichever download task ran (pulled via
# XCom) into the labdata row identified by the uid pushed by branch_download.
error_finish_operator = MySqlOperator(
    task_id="finish_with_error",
    mysql_conn_id=biowardrobe_connection_id,
    sql="""update labdata set libstatus=2000,
        libstatustxt="{{ ti.xcom_pull(task_ids=['download_sra','download_local', 'download_aria2'], key=None) }}"
        where uid='{{ ti.xcom_pull(task_ids='branch_download', key='uid') }}'""",
    trigger_rule=TriggerRule.ONE_FAILED,
    autocommit=True,
    dag=dag)
error_finish_operator.set_upstream([cli_download_sra, url_download, download_local_operator, copy_from_biowardrobe])
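
# The success_finish_operator wired up at the top of this example is defined
# above the snippet and not shown. A minimal sketch of what it plausibly
# looks like, mirroring the error step; the libstatus code and status text
# are assumptions, not values from the original DAG, and in the original it
# would precede the set_upstream() call above.
success_finish_operator = MySqlOperator(
    task_id="finish_with_success",
    mysql_conn_id=biowardrobe_connection_id,
    sql="""update labdata set libstatus=10,
        libstatustxt="Download complete"
        where uid='{{ ti.xcom_pull(task_ids='branch_download', key='uid') }}'""",
    trigger_rule=TriggerRule.ONE_SUCCESS,
    autocommit=True,
    dag=dag)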


#
#      BIOWARDROBE DOWNLOAD TRIGGER
#


dag_t = DAG(
    dag_id='biowardrobe_download_trigger',
    default_args={
        'owner': 'airflow',
        'start_date': datetime(1970, 1, 1, 1, 1, 1, 1),
        'depends_on_past': False,
        'email': ['*****@*****.**'],
        'email_on_failure': False,
        # remaining default_args and DAG() arguments truncated in the
        # original snippet
    })


#
#      EXAMPLE 2
#

# A self-contained DAG that chains two MySqlOperator tasks. The imports and
# the opening of default_args were missing from the snippet and are restored
# here.
import airflow.utils.dates
from datetime import timedelta

from airflow import DAG
from airflow.operators.mysql_operator import MySqlOperator

default_args = {
    'owner': 'liyi',
    'start_date': airflow.utils.dates.days_ago(0),
    'retries': 1,
    'retry_delay': timedelta(minutes=5)
}

dag1 = DAG('mysql_create_from', default_args=default_args, description='A simple create_mysql_data DAG',
           schedule_interval=timedelta(minutes=10))
sql1 = ["INSERT INTO airflow_test.test1_from (date1) values (now())"]

t1 = MySqlOperator(
    mysql_conn_id='liyi',
    sql=sql1,
    task_id='mysql_create_from',
    dag=dag1
)

sql2 = ["""insert into airflow_test.test1_in(date1)
select * from airflow_test.test1_from ORDER BY date1 DESC LIMIT 1;"""]

t2 = MySqlOperator(
    mysql_conn_id='liyi',
    sql=sql2,
    task_id='mysql_insert_into',
    dag=dag1
)

t2.set_upstream(t1)
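
# The tasks above embed literal SQL strings. MySqlOperator also accepts a
# `parameters` argument that is handed to the MySQL driver for value binding,
# which avoids string interpolation. A small sketch of a variant of t1 under
# the same assumptions (same connection and table); the bound value is purely
# illustrative.
t1_bound = MySqlOperator(
    mysql_conn_id='liyi',
    sql="INSERT INTO airflow_test.test1_from (date1) values (%(d)s)",
    parameters={'d': '2020-01-01 00:00:00'},
    task_id='mysql_create_from_bound',
    dag=dag1
)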


#
#      EXAMPLE 3
#

# A close variant of Example 1: the same finish steps, minus the
# copy_from_biowardrobe upstream task.
success_finish_operator.set_upstream([cli_download_sra, url_download, download_local_operator])


#
#  STEP: FINISH WITH ERROR
#
error_finish_operator = MySqlOperator(
    task_id="finish_with_error",
    mysql_conn_id=biowardrobe_connection_id,
    sql="""update labdata set libstatus=2000,
        libstatustxt="{{ ti.xcom_pull(task_ids=['download_sra','download_local', 'download_aria2'], key=None) }}"
        where uid='{{ ti.xcom_pull(task_ids='branch_download', key='uid') }}'""",
    trigger_rule=TriggerRule.ONE_FAILED,
    autocommit=True,
    dag=dag)
error_finish_operator.set_upstream([cli_download_sra, url_download, download_local_operator])
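
# Both finish steps pull 'uid' from XCom, but branch_download itself is
# outside this snippet. A hypothetical sketch of how such a branch callable
# could push that key; the uid source and the returned task_id are
# assumptions.
def branch_download_callable(**context):
    uid = context['dag_run'].conf['uid']           # assumed: uid arrives via the trigger conf
    context['ti'].xcom_push(key='uid', value=uid)  # this is what the finish steps pull
    return 'download_sra'                          # BranchPythonOperator follows the returned task_id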


#
#      BIOWARDROBE DOWNLOAD TRIGGER
#


dag_t = DAG(
    dag_id='biowardrobe_download_trigger',
    default_args={
        'owner': 'airflow',
        'start_date': days_ago(1),
        'depends_on_past': False,
        'email': ['*****@*****.**'],
        'email_on_failure': False,
        # remaining default_args and DAG() arguments truncated in the
        # original snippet
    })


#
#      EXAMPLE 4
#

# A Spotify chart pipeline: one task creates the target table, another runs a
# scraper script. The assignment line below was missing from the snippet; the
# variable name is recovered from the set_upstream() calls at the end.
create_spotify_daily_top_50_global = MySqlOperator(
	task_id='create_spotify_daily_top_50_global',
	dag=dag,
	mysql_conn_id="airflow_mysql_db",
	sql="""
	CREATE TABLE IF NOT EXISTS spotify_daily_top_50_global 
	(
	artist_name VARCHAR(100),
	track_id VARCHAR(100),
	track_name VARCHAR(100),
	duration_ms INT,
	explicit BOOL,
	popularity INT,
	daily_rank INT,
	album_id VARCHAR(100),
	album_name VARCHAR(100),
	album_type VARCHAR(100),
	release_date VARCHAR(100),
	dt VARCHAR(100)
	) 
	; 
	"""
)

populate_spotify_daily_top_50_global = BashOperator(
	task_id='populate_spotify_daily_top_50_global',
	dag=dag,
	bash_command='python /usr/local/airflow/dags/Scraper.py'
	)

create_spotify_daily_top_50_global.set_upstream(check_valid_mysql_connection)
populate_spotify_daily_top_50_global.set_upstream(check_valid_mysql_connection)
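
# check_valid_mysql_connection is referenced above but defined outside this
# snippet. A minimal sketch of such a gate task, assuming a trivial probe
# query over the same connection is an acceptable health check:
check_valid_mysql_connection = MySqlOperator(
	task_id='check_valid_mysql_connection',
	dag=dag,
	mysql_conn_id="airflow_mysql_db",
	sql="SELECT 1"
	)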


#
#      EXAMPLE 5
#

# The imports, default_args, and the opening of the DAG() call are truncated
# in the original snippet; the dag_id below is a placeholder.
dag = DAG('mariadb_example',
          default_args=default_args,
          schedule_interval=timedelta(days=1))

# Task 1
t1 = BashOperator(task_id='print_date', bash_command='date', dag=dag)


# Task 2
def my_python_function():
    now = datetime.now()
    response = 'This function ran at ' + str(now)
    return response


t2 = PythonOperator(task_id='my_python_task',
                    python_callable=my_python_function,
                    params={'my_param': 'Parameter I passed in'},
                    dag=dag)

# Task 3
t3 = MySqlOperator(task_id='mariadb_task',
                   sql="INSERT INTO test VALUES (1, '이찬호', 'my first dag');",
                   mysql_conn_id='mariadb',
                   autocommit=True,
                   database="djob",
                   dag=dag)

# Pipeline Structure
t2.set_upstream(t1)
t3.set_upstream(t2)
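
# t3 inserts a fixed row. Because a PythonOperator's return value is pushed
# to XCom automatically, a follow-up task can template it into its SQL. A
# sketch, assuming the third column of `test` is wide enough for the message:
t4 = MySqlOperator(task_id='mariadb_task_from_xcom',
                   sql="INSERT INTO test VALUES "
                       "(3, 'airflow', '{{ ti.xcom_pull(task_ids=\"my_python_task\") }}');",
                   mysql_conn_id='mariadb',
                   autocommit=True,
                   database="djob",
                   dag=dag)
t4.set_upstream(t3)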