def test_sqlite_operator_with_multiple_statements(self):
    """SqliteOperator accepts a list of statements and runs them in order."""
    statements = [
        "CREATE TABLE IF NOT EXISTS test_airflow (dummy VARCHAR(50))",
        "INSERT INTO test_airflow VALUES ('X')",
    ]
    task = SqliteOperator(
        task_id='sqlite_operator_with_multiple_statements',
        sql=statements,
        dag=self.dag,
    )
    task.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_sqlite_operator_with_one_statement(self):
    """SqliteOperator runs a single CREATE TABLE statement passed as a string."""
    create_sql = """
    CREATE TABLE IF NOT EXISTS test_airflow (
        dummy VARCHAR(50)
    );
    """
    task = SqliteOperator(task_id='basic_sqlite', sql=create_sql, dag=self.dag)
    task.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_sqlite_operator_with_invalid_sql(self):
    """A statement against a table that was never created must fail.

    The second statement targets ``test_airflow2``, which does not exist,
    so running the operator is expected to raise
    ``sqlite3.OperationalError``.
    """
    from sqlite3 import OperationalError

    sql = [
        "CREATE TABLE IF NOT EXISTS test_airflow (dummy VARCHAR(50))",
        "INSERT INTO test_airflow2 VALUES ('X')",
    ]
    # task_id fixed: the original reused 'sqlite_operator_with_multiple_statements'
    # (copy-paste from the sibling test), which collides in the shared dag.
    op = SqliteOperator(
        task_id='sqlite_operator_with_invalid_sql',
        sql=sql,
        dag=self.dag,
    )
    # pytest.raises replaces the manual try / pytest.fail / except pattern
    # and additionally asserts on the error message.
    with pytest.raises(OperationalError, match='no such table: test_airflow2'):
        op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
'email': user['email'] }) processed_user.to_csv('/tmp/processed_user.csv', index=None, header=False) with DAG('user_processing', schedule_interval='@daily', default_args=default_args, catchup=True) as dag: creating_table = SqliteOperator(task_id='creating_table', sqlite_conn_id='db_sqlite', sql=''' CREATE TABLE IF NOT EXISTS users ( firstname TEXT NOT NULL, lastname TEXT NOT NULL, country TEXT NOT NULL, username TEXT NOT NULL, password TEXT NOT NULL, email TEXT NOT NULL PRIMARY KEY ); ''') is_api_available = HttpSensor(task_id='is_api_available', http_conn_id='user_api', endpoint='api/') extracting_user = SimpleHttpOperator( task_id='extracting_user', http_conn_id='user_api', endpoint='api/', method='GET',
'username': user['login']['username'], 'password': user['login']['password'], 'email': user['email'] }) processed_user.to_csv('/tmp/processed_user.csv', index=None, Header=Fales) with DAG('user_processing', schedule_interval='@daily', start_date=datetime(2021,4,10), catchup=False) as dag: creating_table =SqliteOperator( task_id = 'creating_user_table', sqlite_conn_id='db_sqlite', sql = ''' CREATE TABLE IF NOT EXISTS users ( firstname TEXT NOT NULL, lastname TEXT NOT NULL, country TEXT NOT NULL, username TEXT NOT NULL, password TEXT NOT NULL, email TEXT NOT NULL PRIMARY KEY ); ''' is_api_available = HTTPSensor( task_id='is_api_available', http_conn_id ='user_api',
dag = DAG( dag_id='example_sqlite', schedule_interval='@daily', start_date=days_ago(2), tags=['example'], ) # [START howto_operator_sqlite] # Example of creating a task that calls a common CREATE TABLE sql command. create_table_sqlite_task = SqliteOperator( task_id='create_table_sqlite', sqlite_conn_id='sqlite_conn_id', sql=r""" CREATE TABLE table_name ( column_1 string, column_2 string, column_3 string ); """, dag=dag, ) # [END howto_operator_sqlite] # [START howto_operator_sqlite_external_file] # Example of creating a task that calls an sql command from an external file. external_create_table_sqlite_task = SqliteOperator( task_id='create_table_sqlite_external_file', sqlite_conn_id='sqlite_conn_id', sql='/scripts/create_table.sql',
from airflow import DAG
from airflow.providers.sqlite.operators.sqlite import SqliteOperator
from datetime import datetime

# start_date is inherited by every task in the DAG via default_args.
default_args = {'start_date': datetime(2020, 1, 1)}

with DAG('user_processing',
         schedule_interval='@daily',
         default_args=default_args,
         catchup=False) as dag:

    # Create the target table for the user-processing pipeline.
    # IF NOT EXISTS makes the task idempotent: without it, every run after
    # the first fails with "table users already exists".
    # Fixed: column name typo 'firtname' -> 'firstname'; SQL keyword casing
    # normalized (was a mix of NOT NULL / not null / text / TEXT).
    creating_table = SqliteOperator(
        task_id='creating_table',
        sqlite_conn_id='db_sqlite',
        sql='''
            CREATE TABLE IF NOT EXISTS users (
                firstname TEXT NOT NULL,
                lastname TEXT NOT NULL,
                country TEXT NOT NULL,
                username TEXT NOT NULL,
                password TEXT NOT NULL,
                emailid TEXT NOT NULL PRIMARY KEY
            );
        ''')
check_api = HttpSensor( task_id="check_api", endpoint="world", response_check=lambda response: True if len(response.json()) > 0 else False, ) download_covid19_data = PythonOperator( task_id="download_covid19_data", python_callable=_download_covid19_data, ) create_table = SqliteOperator(task_id="create_db", sqlite_conn_id="sqlite_default", sql=""" CREATE TABLE IF NOT EXISTS covid19 ( NewConfirmed TEXT NOT NULL ); """) load_data_to_db = BashOperator(task_id="load_data_to_db", bash_command=""" sqlite3 -separator "," /Users/atb/covid19-{{ ds }}.db ".import /Users/atb/covid19-{{ ds }}.csv covid19" """) send_mail = EmailOperator( task_id="send_email", to=[ "*****@*****.**", ], subject="Finished loading COVID-19 data to db!",
dag_id='example_sqlite', schedule_interval='@daily', start_date=datetime(2021, 1, 1), tags=['example'], catchup=False, ) # [START howto_operator_sqlite] # Example of creating a task that calls a common CREATE TABLE sql command. create_table_sqlite_task = SqliteOperator( task_id='create_table_sqlite', sql=r""" CREATE TABLE Customers ( customer_id INT PRIMARY KEY, first_name TEXT, last_name TEXT ); """, dag=dag, ) # [END howto_operator_sqlite] @dag.task(task_id="insert_sqlite_task") def insert_sqlite_hook(): sqlite_hook = SqliteHook() rows = [('James', '11'), ('James', '22'), ('James', '33')] target_fields = ['first_name', 'last_name']
processed_user.to_csv("/tmp/processed_user.csv", index=None, header=False) with DAG('user_processing', schedule_interval='@daily', default_args=default_args, catchup=False) as dag: creating_table = SqliteOperator( task_id='creating_table', sqlite_conn_id='db_sqlite', sql=''' create table if not exists users( firstname text not null, lastname text not null, country text not null, username text not null, password text not null, email text Primary key ); ''' ) is_api_available = HttpSensor( task_id='is_api_available', http_conn_id='user_api', endpoint='api/' ) extracting_user = SimpleHttpOperator( task_id='extracting_user',
task_id='print_prev_ds', bash_command='echo {{ prev_ds }} {{ macros.ds_add("2015-01-01", 5) }}') check_api = HttpSensor( task_id='check_api', endpoint='/world', response_check=lambda response: response.status_code == 200) download_covid19_data = PythonOperator( task_id='download_covid19_data', python_callable=_download_covid19_data) create_table = SqliteOperator(task_id='create_table', sqlite_conn_id='sqlite_default', sql=''' CREATE TABLE IF NOT EXISTS covid19 ( NewConfirmed TEXT NOT NULL ); ''') load_data_to_db = BashOperator(task_id='load_data_to_db', bash_command=''' sqlite3 -separator "," /Users/yothinix/projects/_class/my-airflow/covid19.db ".import /Users/yothinix/projects/_class/my-airflow/covid19-{{ ds }}.csv covid19" ''') # if we have f-string we need to use 4: {{{{ XXX }}}} but template will use just 2 {{ ds }} send_email = EmailOperator( task_id='send_email', to=[ '*****@*****.**', ],
start = Variable.get(key="start", default_var="starting point") end = Variable.get(key="end", default_var="ending point") start_point = BashOperator(dag=dag, task_id="start_point", bash_command=f"echo '{start}'") end_point = BashOperator(dag=dag, task_id="end_point", bash_command=f"echo '{end}'") create_table = SqliteOperator( dag=dag, task_id="create_table", sqlite_conn_id="db_sqlite", sql=f""" CREATE TABLE IF NOT EXISTS {_TABLE_NAME} ( name TEXT NOT NULL, country TEXt NOT NULL ); """, ) check_api_available = HttpSensor( dag=dag, task_id="check_api_available", http_conn_id=_API_NAME, endpoint="api/", ) extract_user = SimpleHttpOperator( dag=dag,
from airflow.models import DAG
from airflow.providers.sqlite.operators.sqlite import SqliteOperator
from datetime import datetime
import airflow

default_args = {'start_date': datetime(2021, 1, 1)}

with DAG('user_processing',
         schedule_interval='@daily',
         start_date=datetime(2021, 1, 1),
         catchup=False) as dag:

    # Create the users table for the pipeline.
    # Fixed: 'PRIMARY_KEY' (underscore) is not valid SQL -- it must be
    # 'PRIMARY KEY' for AUTOINCREMENT to be accepted -- and a table may
    # declare only ONE primary key, so the duplicate declaration on
    # 'email' is now a UNIQUE constraint instead. IF NOT EXISTS makes
    # the daily re-run idempotent.
    creating_table = SqliteOperator(
        task_id='creating_table',
        sqlite_conn_id='db_sqlite',
        sql='''
            CREATE TABLE IF NOT EXISTS users (
                user_id INTEGER PRIMARY KEY AUTOINCREMENT,
                firstname TEXT NOT NULL,
                lastname TEXT NOT NULL,
                country TEXT NOT NULL,
                username TEXT NOT NULL,
                password TEXT NOT NULL,
                email TEXT NOT NULL UNIQUE
            );
        ''')