def begin_pipeline(**kwargs):
    """Resolve the encrypted HPDS packed-file location for the current
    pipeline and publish it to XCom for downstream tasks.

    Pushes to XCom:
        packed_dir:               directory holding the packed HPDS file
        hpds_encrypted_file_name: packed file name with ".encrypted" appended
        hpds_encrypted_file:      full path to the encrypted packed file
    """
    dp = DagPebbles()
    pipeline = dp.get_current_pipeline()
    print(pipeline)
    # Default to "" so a missing env var cannot make the path concatenation
    # below raise TypeError (None + str); matches the os.environ.get(..., "")
    # style used elsewhere in this file.
    packed_dir = os.environ.get("BCH_HPDS_INTERNAL", "")
    kwargs["ti"].xcom_push(key='packed_dir', value=packed_dir)
    hpds_encrypted_file_name = dp.get_hpds_packed_file_name() + ".encrypted"
    kwargs["ti"].xcom_push(key='hpds_encrypted_file_name', value=hpds_encrypted_file_name)
    hpds_encrypted_file = packed_dir + '/' + hpds_encrypted_file_name
    kwargs["ti"].xcom_push(key='hpds_encrypted_file', value=hpds_encrypted_file)
def begin_pipeline(**kwargs):
    """Look up the current pipeline's log file on S3 and share the
    download coordinates (folder, file, key) with downstream tasks
    via XCom.
    """
    print("begin_pipeline:")
    dp = DagPebbles()
    pipeline = dp.get_current_pipeline()
    s3_bucket = os.environ.get("S3_BUCKET", "")
    folder_path = pipeline['log_file_path']
    s3_file = pipeline['log_file_name']
    download_key = dp.get_download_key(s3_bucket, folder_path, s3_file)
    # Publish each value under its own key so downstream operators can
    # pull exactly the piece they need.
    ti = kwargs["ti"]
    for key, value in (
        ("folder_path", folder_path),
        ("s3_file", s3_file),
        ("download_key", download_key),
    ):
        ti.xcom_push(key=key, value=value)
# Ejemplo n.º 3
# 0
def begin_pipeline(**kwargs):
    """Build a timestamped name for the packed HPDS phenotype archive,
    record it via DagPebbles, and publish it (plus the target directory)
    to XCom for downstream tasks.

    Pushes to XCom:
        packed_file_name: "hpds_phenotype_<ALS_TAG>_<timestamp>_<ETL_TAG>.tar.gz"
        packed_dir:       directory where the packed file lives
    """
    dp = DagPebbles()
    pipeline = dp.get_current_pipeline()
    print(pipeline)
    current_time = datetime.now()
    # Default missing env vars to "" so the name-building concatenation
    # cannot raise TypeError (str + None); matches the os.environ.get(..., "")
    # style used elsewhere in this file.
    als_tag = os.environ.get("BCH_PIC_SURE_HPDS_ALS_TAG", "")
    etl_tag = os.environ.get("BCH_PIC_SURE_HPDS_ETL_TAG", "")
    timestamp = current_time.strftime('%m_%d_%Y_%H_%M_%S')
    packed_file_name = ("hpds_phenotype_" + als_tag + "_" + timestamp
                        + "_" + etl_tag + ".tar.gz")
    packed_dir = os.environ.get("BCH_HPDS_INTERNAL", "")
    kwargs["ti"].xcom_push(key='packed_file_name', value=packed_file_name)
    kwargs["ti"].xcom_push(key='packed_dir', value=packed_dir)
    dp.save_hpds_package_file_name(packed_file_name)
 # Terminal task. trigger_rule="none_failed" lets `end` run even when an
 # upstream branch was skipped (but not when one failed) — needed because
 # the pipeline check below branches into skipped/passed paths.
 t_end = PythonOperator(
     task_id="end",
     python_callable=end,
     provide_context=True,
     trigger_rule="none_failed",
     dag=dag,
 )
 
 
 # DAG wiring: begin -> check; the check then fans out to either the
 # "skipped" path (straight to end_pipeline) or the "passed" path.
 t_pipeline_begin >> t_check_pipeline
 t_check_pipeline >> t_pipeline_check_skipped >> t_end_pipeline 
 t_check_pipeline >> t_pipeline_check_passed
 
 try: 
     dp = DagPebbles() 
     pipeline = dp.get_current_pipeline()
     s3_file = pipeline['log_file_name'] 
     target_log_file = pipeline['log_file_name'].replace(".encrypted", "")
     decrypt_log_file_cmd = "/opt/bitnami/airflow/airflow-data/scripts/decrypt_s3_file.sh  " + s3_file + " {{ ti.xcom_pull(key='SKIP_DECRYPT_FILES')}} "
     t_decrypt_log_file = BashOperator(
         task_id='decrypt_log_file',
         bash_command=decrypt_log_file_cmd,
         dag=dag) 
     t_pipeline_check_passed >> t_decrypt_log_file 
             
     files = dp.get_files(log_file_id = None, type = 'decrypt')
     
     if files == None or len(files) == 0:
         t_decrypt_log_file  >> t_end_pipeline
     else:
         for index, s3_file in enumerate(files):
# Ejemplo n.º 5
# 0
def begin_pipeline(**kwargs):
    dp = DagPebbles()
    pipeline = dp.get_current_pipeline()
    print(pipeline)