              # based on the number of tasks.
              # Although, if we run regularly, it should be pretty consistent.
              # Last time I checked, FC took about 30s per tile (task).

              cd {{work_dir}}

              qsub -N {{ params.product }} \
              -q {{ params.queue }} \
              -W umask=33 \
              -l wd,walltime=5:00:00,mem=190GB,ncpus=48 -m abe \
              -l storage=gdata/v10+gdata/fk4+gdata/rs0+gdata/if87 \
              -M [email protected] \
              -P {{ params.project }} -o {{ work_dir }} -e {{ work_dir }} \
              -- /bin/bash -l -c \
                  "module use /g/data/v10/public/modules/modulefiles/; \
                  module load {{ params.module }}; \
                  datacube-fc run -vv --input-filename {{work_dir}}/tasks.pickle --celery pbs-launch"
            """),
            params={'product': product},
            timeout=60 * 20,
            do_xcom_push=True,
        )
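
The comment at the top of this qsub command sizes the walltime from the number of tasks, at roughly 30 s per FC tile. A hedged sketch of that arithmetic, assuming the tiles are spread across the 48 requested CPUs and padded by a 2x safety factor (both the even spread and the factor are assumptions, not from the source):

import math

def estimate_walltime(n_tasks, seconds_per_task=30, ncpus=48, safety=2.0):
    """Rough PBS walltime string for a run of n_tasks FC tiles.

    Assumes ~30 s per tile spread evenly over ncpus workers, padded by a
    safety factor; rounds up to whole hours with a one-hour floor.
    """
    hours = max(1, math.ceil(n_tasks * seconds_per_task * safety / ncpus / 3600))
    return f'{hours}:00:00'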

        wait_for_completion = PBSJobSensor(
            task_id=f'wait_for_{product}',
            pbs_job_id="{{ ti.xcom_pull(task_ids='%s') }}" % submit_task_id,
            timeout=60 * 60 * 24 * 7,
        )

        ingest_completed >> generate_tasks >> test_tasks >> submit_fc_job >> wait_for_completion
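
The PBSJobSensor's XCom value is later treated as a mapping of PBS job attributes (Example #2 below pulls Error_Path out of it). Purely as an illustration of where such a mapping can come from, and not the sensor's actual implementation, qstat -f output for a job can be parsed like this:

def parse_qstat_full(text):
    """Parse the output of qstat -f <job_id> into a dict of job attributes."""
    attrs = {}
    # qstat -f wraps long values onto continuation lines that start with a tab.
    for line in text.replace('\n\t', '').splitlines():
        key, sep, value = line.strip().partition(' = ')
        if sep:
            attrs[key] = value
    return attrs

# Error_Path comes back as "host:/path/to/dir", which is why the error-checking
# task in Example #2 splits it on ':' before using it as a directory.
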
Example #2
               -M [email protected] \
               -P {{ params.project }} \
               -o {{ work_dir }} \
               -e {{ work_dir }} \
               -- /bin/bash -l -c \
                   "module use /g/data/v10/public/modules/modulefiles/; \
                   module load {{ params.module }}; \
                   module load openmpi; \
                   mpirun datacube-wofs run-mpi -v --input-filename {{work_dir}}/tasks.pickle"
        """,
        do_xcom_push=True,
        timeout=60 * 20,
    )
    wait_for_wofs_albers = PBSJobSensor(
        task_id='wait_for_wofs_albers',
        pbs_job_id="{{ ti.xcom_pull(task_ids='%s') }}" % submit_task_id,
        timeout=60 * 60 * 24 * 7,
    )
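
The check_for_errors task below prefixes its script with COMMON, a shared prelude defined outside this excerpt. A hypothetical stand-in for what such a prelude might contain; the path and contents here are assumptions, not the real constant:

# Hypothetical stand-in; the real COMMON prelude is defined outside this excerpt.
COMMON = """
    {% set work_dir = '/g/data/v10/work/' + ds_nodash -%}
    set -eu
    set -o pipefail
"""
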
    check_for_errors = SSHOperator(
        task_id='check_for_errors',
        command=COMMON + """
        error_dir={{ ti.xcom_pull(task_ids='wait_for_wofs_albers')['Error_Path'].split(':')[1] }}
        echo error_dir: ${error_dir}

        # Helper function to not error if the grep search term is not found
        c1grep() { grep "$@" || test $? = 1; }

        echo Checking for any errors or failures in PBS output

        task_failed_lines=$(c1grep -ci 'Task failed' ${error_dir}/*.ER)
        if [[ $task_failed_lines != "0" ]]; then
with DAG(
        'nci_dataset_sync',
        doc_md=__doc__,
        default_args=c2_default_args,
        catchup=False,
        schedule_interval=c2_schedule_interval,
        tags=['nci', 'landsat_c2'],
        default_view="tree",
) as dag:
    for product in SYNCED_PRODUCTS:
        submit_sync = SSHOperator(
            task_id=f'submit_sync_{product}',
            command=SYNC_COMMAND,
            params={
                'product': product,
                'sync_prefix_path': SYNC_PREFIX_PATH[product],
                'sync_suffix_path': SYNC_SUFFIX_PATH[product],
            },
            do_xcom_push=True,
            timeout=5 * MINUTES,  # For running SSH Commands
        )

        wait_for_completion = PBSJobSensor(
            task_id=f'wait_for_{product}',
            pbs_job_id="{{ ti.xcom_pull(task_ids='submit_sync_%s') }}" %
            product,
        )

        submit_sync >> wait_for_completion
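
Every example here pairs an SSHOperator that qsubs a job and pushes the PBS job id to XCom with a PBSJobSensor that waits on that id. A hypothetical helper capturing the pattern, reusing only the operator arguments shown in these examples:

def submit_and_wait(name, command, params=None, timeout=60 * 60 * 24 * 7):
    """Build a qsub-submitting task and the PBS sensor that waits on its job id."""
    submit = SSHOperator(
        task_id=f'submit_{name}',
        ssh_conn_id='lpgs_gadi',
        command=command,
        params=params or {},
        do_xcom_push=True,  # assumes the command ends with qsub, whose stdout is the job id
        timeout=5 * MINUTES,
    )
    wait = PBSJobSensor(
        task_id=f'wait_for_{name}',
        ssh_conn_id='lpgs_gadi',
        pbs_job_id="{{ ti.xcom_pull(task_ids='submit_%s') }}" % name,
        timeout=timeout,
    )
    submit >> wait
    return submit, wait
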
        )
        test_tasks = SSHOperator(
            task_id=f'test_tasks_{ing_product}',
            ssh_conn_id='lpgs_gadi',
            command=test_tasks_command,
            params={'ing_product': ing_product},
            timeout=90,
        )

        submit_task_id = f'submit_ingest_{ing_product}'
        submit_ingest_job = SSHOperator(
            task_id=submit_task_id,
            ssh_conn_id='lpgs_gadi',
            command=qsubbed_ingest_command,
            params={'ing_product': ing_product},
            do_xcom_push=True,
            timeout=90,
        )
        wait_for_completion = PBSJobSensor(
            task_id=f'wait_for_{ing_product}_ingest',
            ssh_conn_id='lpgs_gadi',
            pbs_job_id="{{ ti.xcom_pull(task_ids='%s') }}" % submit_task_id)

        start >> save_tasks >> test_tasks >> submit_ingest_job >> wait_for_completion >> completed
"""
f = open('tasks.bin', 'rb')
while pickle.load(f): n += 1

EOFError: Ran out of input
"""
Example #5
                module load {{params.module}}
                module load openmpi/3.1.4

                mpirun --tag-output dea-cogger mpi-convert --product-name "{{params.product}}" \\
                 --output-dir "{{work_dir}}/out/" {{params.product}}_file_list.txt
                
                EOF
            """),
            do_xcom_push=True,
            timeout=60 * 5,
            params={'product': product},
        )

        wait_for_cog_convert = PBSJobSensor(
            task_id=f'wait_for_cog_convert_{product}',
            pbs_job_id="{{ ti.xcom_pull(task_ids='%s') }}" % submit_task_id,
            timeout=60 * 60 * 24 * 7,
        )

        validate_task_id = f'submit_validate_cog_job_{product}'
        validate_cogs = SSHOperator(
            task_id=validate_task_id,
            command=dedent(COMMON + """
                cd {{work_dir}}
                
                qsub <<EOF
                #!/bin/bash
                #PBS -l wd,walltime=5:00:00,mem=190GB,ncpus=48,jobfs=1GB
                #PBS -P {{params.project}}
                #PBS -q {{params.queue}}
                #PBS -l storage=gdata/v10+gdata/fk4+gdata/rs0+gdata/if87
          {% set work_dir = '~/airflow_testing/' -%}
          mkdir -p {{ work_dir }};
          cd {{ work_dir }};
          qsub \
          -q express \
          -W umask=33 \
          -l wd,walltime=0:10:00,mem=3GB -m abe \
          -l storage=gdata/v10+gdata/fk4+gdata/rs0+gdata/if87 \
          -P {{ params.project }} -o {{ work_dir }} -e {{ work_dir }} \
          -- /bin/bash -l -c \
              "source $HOME/.bashrc; \
              module use /g/data/v10/public/modules/modulefiles/; \
              module load {{ params.module }}; \
              dea-coherence --check-locationless time in [2019-12-01, 2019-12-31] > coherence-out.txt"
        """,
        params={
            'project': 'v10',
            'queue': 'normal',
            'module': 'dea/unstable',
            'year': '2019'
        },
        do_xcom_push=True,
    )
    wait_for_completion = PBSJobSensor(
        task_id='wait_for_completion',
        ssh_conn_id='lpgs_gadi',
        pbs_job_id="{{ ti.xcom_pull(task_ids='submit_pbs_job') }}"
    )

    submit_job >> wait_for_completion
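
params defines 'year', but the dea-coherence command hardcodes its December 2019 window, and 'queue' is likewise unused because the qsub line pins -q express. If the window is meant to follow the parameter, one hypothetical tweak is to template the dates:

# Hypothetical: tie the query window to params['year'] instead of hardcoding 2019.
"dea-coherence --check-locationless time in [{{ params.year }}-12-01, {{ params.year }}-12-31] > coherence-out.txt"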