def test_execute(self, mock_remove, mock_bulk_load_custom,
                     mock_download_file):
        """Happy path: execute() downloads the S3 key, bulk-loads the temp
        file into MySQL, then removes the temp file — each exactly once."""
        kwargs = self.s3_to_mysql_transfer_kwargs
        S3ToMySqlOperator(**kwargs).execute({})

        # Download is keyed by the configured S3 source key.
        mock_download_file.assert_called_once_with(key=kwargs['s3_source_key'])
        # The downloaded temp file is what gets bulk-loaded.
        mock_bulk_load_custom.assert_called_once_with(
            table=kwargs['mysql_table'],
            tmp_file=mock_download_file.return_value,
            duplicate_key_handling=kwargs['mysql_duplicate_key_handling'],
            extra_options=kwargs['mysql_extra_options'])
        # Cleanup: the same temp file is deleted afterwards.
        mock_remove.assert_called_once_with(mock_download_file.return_value)
    # Collapse this run's per-flight JSON landing files into a single
    # aggregated CSV (both paths are Jinja-templated on ds_nodash).
    flights_json_to_table = JsonToCsvOperator(
        task_id='aggregated.json_flights_to_csv',
        json_filename='/opt/airflow/data/landing/{{ds_nodash}}/flight_*',
        csv_filename='/opt/airflow/data/aggregated/flights_{{ds_nodash}}.csv',
    )

    # Push the aggregated CSV to S3 via the AWS CLI.
    # NOTE(review): "agregated" is misspelled, but the name is kept because
    # downstream dependency wiring outside this chunk may reference it.
    upload_agregated_flights = BashOperator(
        task_id="aggregated.upload_flights",
        bash_command=(
            'aws s3 cp /opt/airflow/data/aggregated/flights_{{ds_nodash}}.csv'
            ' s3://fligoo.data-science/esteban.delboca/aggregated/'
        ),
    )

    # Load the aggregated flights CSV from S3 into the `flights` MySQL table.
    # mysql_extra_options is appended to MySQL's LOAD DATA statement:
    # comma-delimited fields, skipping the CSV header row.
    # NOTE(review): the exact whitespace inside the triple-quoted literal is
    # sent to MySQL as-is — do not reformat it.
    dump_to_mysql = S3ToMySqlOperator(
        task_id="optimize.dump_to_mysql",
        s3_source_key=
        "s3://fligoo.data-science/esteban.delboca/aggregated/flights_{{ds_nodash}}.csv",
        mysql_table='flights',
        mysql_extra_options="""
        FIELDS TERMINATED BY ','
        IGNORE 1 LINES
        """)

    # Export cancelled flights from MySQL to S3 as a CSV with a header row.
    # BUG FIX: the original built s3_key with str.format(), which collapses
    # the escaped Jinja braces "{{ds_nodash}}" into "{ds_nodash}" — Airflow
    # then never rendered the macro and the object key contained the literal
    # text "{ds_nodash}". Plain concatenation keeps the template intact.
    export_ds_canceled_flights = MySQLToS3Operator(
        task_id="optimize.export.canceled_flights",
        query=
        "SELECT airline_code, flight_number, flight_date FROM flights WHERE flight_status = 'cancelled';",
        s3_bucket=default_args['s3_bucket'],
        s3_key=default_args['s3_folder']
        + "/optimized/canceled_flights_{{ds_nodash}}.csv",
        header=True)

    export_ds_active_flights = MySQLToS3Operator(