import logging
import json
import os
import boto3
from json_logger import setup_logging

setup_logging(logging.INFO)
logger = logging.getLogger()

s3 = boto3.client('s3')
BUCKET_NAME = os.environ["BUCKET_NAME"]
KEY_NAME = os.environ["KEY_NAME"]
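# For local testing you might export these first (values are hypothetical,
# not from the original module):
#   export BUCKET_NAME=covid-data
#   export KEY_NAME=covid/latest.csv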


def get_covid_data():
    """
    Parameters
    ----------
    None
        We can include parameters in a future enhancement and include them in the WHERE clause of the SQL query
    Returns
    ------
    records: list, list of record (row) objects
    """

    res = s3.select_object_content(
        Bucket=BUCKET_NAME,
        Key=KEY_NAME,
        ExpressionType="SQL",
        Expression="select * from s3object s",
        # Serialization settings are assumptions: the source object is taken
        # to be a CSV with a header row, returned as JSON Lines.
        InputSerialization={"CSV": {"FileHeaderInfo": "USE"}},
        OutputSerialization={"JSON": {}},
    )

    # select_object_content returns an event stream; concatenate the Records
    # events, then parse each newline-delimited JSON row.
    payload = "".join(
        event["Records"]["Payload"].decode("utf-8")
        for event in res["Payload"]
        if "Records" in event
    )
    records = [json.loads(line) for line in payload.splitlines() if line]

    logger.info("fetched %d records from s3://%s/%s",
                len(records), BUCKET_NAME, KEY_NAME)
    return records
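

# Hypothetical usage sketch (not part of the original module): how
# get_covid_data might be exposed as a Lambda handler. The event shape and
# response format are assumptions; a future enhancement could interpolate
# fields from `event` into the WHERE clause of the S3 Select expression.
def lambda_handler(event, context):
    records = get_covid_data()
    return {"statusCode": 200, "body": json.dumps(records, default=str)}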


"""
Code that goes along with the Airflow tutorial located at:
http://airflow.readthedocs.org/en/latest/tutorial.html
"""
import json
import logging
import subprocess

from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from datetime import datetime, timedelta

from airflow.operators.python_operator import PythonOperator

from json_logger import setup_logging, set_log_record_field

setup_logging(
    service_name='airflow',
    environment_name='dev'
)
log: logging.Logger = logging.getLogger(__name__)


def pp(o):
    return json.dumps(o, indent=2, default=str)
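

# Hypothetical example: pp() keeps structured objects readable in task logs,
# e.g. log.info("default_args:\n%s", pp(default_args))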


default_args = {
    "owner": "airflow",
    "depends_on_past": False,
    "start_date": datetime.now() + timedelta(seconds=10),
    "end_date": datetime.now() + timedelta(seconds=120),
    "email": ["*****@*****.**"],
    "email_on_failure": False,