from airflow.models import DAG
from util.settings import default_settings
from util.dummy import dummy

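# Chain three tasks built by the util.dummy helper: hello-world >> first >> second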
with DAG(**default_settings()) as dag:
    dummy('hello-world') >> dummy('first') >> dummy('second')
from airflow.operators.bash import BashOperator
from airflow.hooks.base import BaseHook
from airflow.models import Variable
import pandas as pd
from airflow.decorators import dag, task
from util.settings import default_settings
from airflow.utils.dates import days_ago

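# Default task arguments; the DAG-level configuration itself comes from util.settings.default_settings()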
default_args = {
    'owner': 'airflow',
}


@dag(**default_settings())
def hw_4_dag():
    """
    # Homework No.4 dag - same functionality as No.3 but
    realization with @dag and @task features
    """
    @task()
    def download_titanic_dataset(
        url='https://web.stanford.edu/class/archive/cs/cs109/cs109.1166/stuff/titanic.csv'
    ):
        """
        # download_titanic_dataset task
        This task downloads the sample titanic dataset from predefined URL and returns
        {'titanic_df' : 'dataframe represented as json data in STR format'}
        It uses methodology of @task decorator covering all XCOM push / pull mechanics under the hood
        """
        df = pd.read_csv(url)
        return {'titanic_df_json_str': df.to_json()}
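
    # Illustrative sketch (an assumption, not part of the original homework): a downstream
    # @task can consume the dict returned above simply by accepting it as an argument;
    # the @task decorator resolves the XCom pull at runtime.
    @task()
    def preview_titanic_dataset(payload: dict):
        """
        # preview_titanic_dataset task (hypothetical example)
        Rebuilds the DataFrame from the JSON string pushed by download_titanic_dataset
        and prints its shape.
        """
        from io import StringIO  # local import keeps the DAG file light at parse time
        df = pd.read_json(StringIO(payload['titanic_df_json_str']))
        print(f'Titanic dataset shape: {df.shape}')
    # Wiring it into the flow would look like:
    #     preview_titanic_dataset(download_titanic_dataset())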