     # Transfer stage: hand the pipeline log file to transfer_file_rds.pl (dump files follow below)
     dp = DagPebbles()
     pipeline = dp.get_current_pipeline()  
     s3_file = pipeline['log_file_name']
     s3_file = DATA_LOCATION + "/" + s3_file
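     # The trailing argument is the SKIP_TRANSFER_FILES flag pulled from XCom; the BashOperator renders the Jinja template at run time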
     transfer_log_file_cmd = "perl  /opt/bitnami/airflow/airflow-data/scripts/transfer_file_rds.pl   " +  s3_file + "   {{ ti.xcom_pull(key='SKIP_TRANSFER_FILES')}}"
     
     print("transfer_log_file_cmd: ")
     print(transfer_log_file_cmd)
     t_transfer_log_file = BashOperator(
         task_id='transfer_log_file',
         bash_command=transfer_log_file_cmd,
         dag=dag)
     
     t_pipeline_check_passed >> t_transfer_log_file 
                     
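     # Fan out one transfer task per dump file; when there are none, the log-file transfer leads straight to t_end_pipeline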
     files = dp.get_files(log_file_id=None, type='transfer')

     if files is None or len(files) == 0:
         t_transfer_log_file  >> t_end_pipeline
     else:
         for index, file in enumerate(files):
             s3_file = DATA_LOCATION + "/" + file
             transfer_file_cmd = "perl  /opt/bitnami/airflow/airflow-data/scripts/transfer_file_rds.pl   " +  s3_file + "   {{ ti.xcom_pull(key='SKIP_TRANSFER_FILES')}}"
             t_transfer_dmp_file = BashOperator(
                 task_id='transfer_dmp_file_'+str(index),
                 bash_command=transfer_file_cmd,
                 dag=dag)
             t_transfer_log_file >> t_transfer_dmp_file >> t_end_pipeline    
 except Exception as e:
     print(e) 
     # Fallback: register a no-op placeholder task and wire it so the DAG still reaches t_end_pipeline
     t_transfer_log_file = DummyOperator(dag=dag, task_id='transfer_log_file')
     t_pipeline_check_passed >> t_transfer_log_file >> t_end_pipeline
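 # Pipeline-check branching: skipped runs go straight to t_end_pipeline; passed runs feed the stages above and below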
 t_check_pipeline >> t_pipeline_check_skipped >> t_end_pipeline 
 t_check_pipeline >> t_pipeline_check_passed
 
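 # Decrypt stage: decrypt the pipeline log file, then fan out a decrypt task per dump file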
 try: 
     dp = DagPebbles() 
     pipeline = dp.get_current_pipeline()
     s3_file = pipeline['log_file_name'] 
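     # Expected name of the decrypted log file (the .encrypted suffix stripped)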
     target_log_file = pipeline['log_file_name'].replace(".encrypted", "")
     decrypt_log_file_cmd = "/opt/bitnami/airflow/airflow-data/scripts/decrypt_s3_file.sh  " + s3_file + " {{ ti.xcom_pull(key='SKIP_DECRYPT_FILES')}} "
     t_decrypt_log_file = BashOperator(
         task_id='decrypt_log_file',
         bash_command=decrypt_log_file_cmd,
         dag=dag) 
     t_pipeline_check_passed >> t_decrypt_log_file 
             
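     # Fan out one decrypt task per dump file, mirroring the transfer stage above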
     files = dp.get_files(log_file_id=None, type='decrypt')

     if files is None or len(files) == 0:
         t_decrypt_log_file  >> t_end_pipeline
     else:
         for index, s3_file in enumerate(files):
             decrypt_dmp_file_cmd = "/opt/bitnami/airflow/airflow-data/scripts/decrypt_s3_file.sh  " + s3_file + " {{ ti.xcom_pull(key='SKIP_DECRYPT_FILES')}} "
             t_decrypt_dmp_file = BashOperator(
                 task_id='decrypt_dmp_file_'+str(index),
                 bash_command=decrypt_dmp_file_cmd,
                 dag=dag)
             t_decrypt_log_file >> t_decrypt_dmp_file >> t_end_pipeline    
 except Exception as e:
     print(e) 
     t_decrypt_log_file = DummyOperator(dag=dag, task_id='decrypt_log_file')
     t_pipeline_check_passed >> t_decrypt_log_file >> t_end_pipeline
 
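 # Download stage: resolve the S3 key for the pipeline log file, download it, then fan out a download task per dump file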
 try: 
     dp = DagPebbles() 
     pipeline = dp.get_current_pipeline()  
     s3_bucket = os.environ.get("S3_BUCKET","")
     folder_path = pipeline['log_file_path']  
     s3_file = pipeline['log_file_name']
     download_key = dp.get_download_key(s3_bucket, folder_path, s3_file) 
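     # The final argument is hard-coded to 'N' here, instead of the SKIP_DOWNLOAD_FILES XCom flag used for the dump files below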
     download_log_file_cmd = "/opt/bitnami/airflow/airflow-data/scripts/download_s3_file.sh  " + download_key + " "  + s3_file +  " " + "N"
     t_download_log_file = BashOperator(
         task_id='download_log_file',
         bash_command=download_log_file_cmd,
         dag=dag) 
     t_pipeline_check_passed >> t_download_log_file
     
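     # Fan out one download task per dump file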
     files = dp.get_files(log_file_id=None, type='download')
     if files is None or len(files) == 0:
         t_download_log_file  >> t_end_pipeline
     else:
         for index, file in enumerate(files):
             s3_bucket = os.environ.get("S3_BUCKET","")
             folder_path = pipeline['log_file_path']  
             s3_file = file
             download_key = dp.get_download_key(s3_bucket, folder_path, s3_file) 
             
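             # Dump-file downloads honor the SKIP_DOWNLOAD_FILES flag pulled from XCom (rendered by the BashOperator at run time)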
             download_dmp_file_cmd = "/opt/bitnami/airflow/airflow-data/scripts/download_s3_file.sh  " + download_key + " "  + s3_file +  " " + " {{ ti.xcom_pull(key='SKIP_DOWNLOAD_FILES')}} "                                
             t_download_dmp_file = BashOperator(
                 task_id='download_dmp_file_'+str(index),
                 bash_command=download_dmp_file_cmd,
                 dag=dag)
             t_download_log_file >> t_download_dmp_file >> t_end_pipeline