# NOTE(review): trailing fragment of a statement that begins outside this
# chunk — it closes a list-of-dicts argument ('Key': key) and the enclosing
# call. Confirm against the preceding lines of the original file.
'Key': key }]})

# Example DAG for the SageMaker endpoint operators. schedule_interval=None
# means it only runs when triggered manually; catchup=False disables backfill.
with DAG(
    dag_id='example_sagemaker_endpoint',
    schedule_interval=None,
    start_date=datetime(2021, 1, 1),
    tags=['example'],
    catchup=False,
) as dag:
    # Upload TRAIN_DATA to s3://S3_BUCKET/INPUT_DATA_S3_KEY/train.csv;
    # replace=True overwrites any object already at that key.
    upload_data = S3CreateObjectOperator(
        task_id='upload_data',
        s3_bucket=S3_BUCKET,
        s3_key=f'{INPUT_DATA_S3_KEY}/train.csv',
        data=TRAIN_DATA,
        replace=True,
    )

    # Start a SageMaker training job described by TRAINING_CONFIG.
    # do_xcom_push=False keeps the API response out of XCom.
    train_model = SageMakerTrainingOperator(
        task_id='train_model',
        config=TRAINING_CONFIG,
        do_xcom_push=False,
    )

    # Create a SageMaker model from MODEL_CONFIG (presumably referencing the
    # artifacts produced by train_model — confirm in MODEL_CONFIG's definition).
    create_model = SageMakerModelOperator(
        task_id='create_model',
        config=MODEL_CONFIG,
        do_xcom_push=False,
    )
# NOTE(review): leading fragment — these kwargs close a call whose start is
# outside this chunk (an S3 get-bucket-tagging operator, per the END marker
# immediately below).
bucket_name=BUCKET_NAME, )
# [END howto_operator_s3_get_bucket_tagging]

# [START howto_operator_s3_delete_bucket_tagging]
# Delete the tag set of BUCKET_NAME.
delete_tagging = S3DeleteBucketTaggingOperator(
    task_id='s3_delete_bucket_tagging',
    bucket_name=BUCKET_NAME,
)
# [END howto_operator_s3_delete_bucket_tagging]

# [START howto_operator_s3_create_object]
# Write DATA to s3://BUCKET_NAME/KEY; replace=True overwrites an existing object.
create_object = S3CreateObjectOperator(
    task_id="s3_create_object",
    s3_bucket=BUCKET_NAME,
    s3_key=KEY,
    data=DATA,
    replace=True,
)
# [END howto_operator_s3_create_object]

# [START howto_operator_s3_list_prefixes]
# List key prefixes in BUCKET_NAME under PREFIX, grouped by DELIMITER.
list_prefixes = S3ListPrefixesOperator(
    task_id="s3_list_prefix_operator",
    bucket=BUCKET_NAME,
    prefix=PREFIX,
    delimiter=DELIMITER,
)
# [END howto_operator_s3_list_prefixes]

# [START howto_operator_s3_list]
# SQL that drops the example table; the interpolated names come from the
# module-level ATHENA_DATABASE / ATHENA_TABLE constants.
# NOTE(review): the literal is kept byte-for-byte as it appears in this chunk —
# the original file's line breaks inside the triple-quoted string are not
# visible here.
QUERY_DROP_TABLE = f""" DROP TABLE IF EXISTS {ATHENA_DATABASE}.{ATHENA_TABLE} """

# Example DAG for the Athena operator. schedule_interval=None means it only
# runs when triggered manually; catchup=False disables backfill.
with DAG(
    dag_id='example_athena',
    schedule_interval=None,
    start_date=datetime(2021, 1, 1),
    tags=['example'],
    catchup=False,
) as dag:
    # Upload SAMPLE_DATA to the table's S3 location so Athena has data to
    # query; replace=True overwrites any object already at that key.
    upload_sample_data = S3CreateObjectOperator(
        task_id='upload_sample_data',
        s3_bucket=S3_BUCKET,
        s3_key=f'{S3_KEY}/{ATHENA_TABLE}/{SAMPLE_FILENAME}',
        data=SAMPLE_DATA,
        replace=True,
    )

    # Run QUERY_CREATE_TABLE against ATHENA_DATABASE; Athena writes query
    # results under s3://S3_BUCKET/S3_KEY.
    create_table = AthenaOperator(
        task_id='create_table',
        query=QUERY_CREATE_TABLE,
        database=ATHENA_DATABASE,
        output_location=f's3://{S3_BUCKET}/{S3_KEY}',
    )

    # [START howto_operator_athena]
    # NOTE(review): this call is truncated at the end of the chunk — its
    # remaining kwargs and closing parenthesis lie outside this view.
    read_table = AthenaOperator(
        task_id='read_table',
        query=QUERY_READ_TABLE,