Example 1
0
def make_dag(file):
    """Build, register and return a bam2bigwig CWLDAG for one JSON job file.

    The job file must contain a "uid" key; "start_day", "author" and
    "email" are optional.  The finished DAG is published in globals()
    under its dag_id so the scheduler can discover it, and also returned.
    """
    with open(file, 'r') as job_stream:
        job = json.load(job_stream)

    if "uid" not in job:
        raise Exception("UID must be part of the job")

    uid = job["uid"]

    # Scheduling window: an explicit "start_day" wins; otherwise fall back
    # to the job file's creation time, shifted _delta+1 days back and
    # normalized to midnight (_delta is a module-level setting).
    if "start_day" in job:
        start_day = job["start_day"]
    else:
        file_created = datetime.utcfromtimestamp(os.path.getctime(file))
        start_day = datetime.combine(file_created - timedelta(_delta + 1),
                                     datetime.min.time())

    end_day = datetime.combine(start_day + timedelta(_delta - 1),
                               datetime.min.time())

    # Presence-checked lookups: identical to the original if/in pattern.
    owner = job.get('author', 'SciDAP')
    email = job.get('email', '*****@*****.**')

    dag_id = uid + "_bam2bigwig"
    default_args = {
        'owner': owner,
        'start_date': start_day,
        'email': [email],
        'email_on_failure': False,
        'email_on_retry': False,
        'end_date': end_day,
        'on_failure_callback': fail_callback,
    }

    dag = CWLDAG(dag_id=dag_id,
                 cwl_workflow="workflows/scidap/bam-genomecov-bigwig.cwl",
                 schedule_interval=timedelta(days=_delta),
                 default_args=default_args)
    dag.create()

    dispatcher = JobDispatcher(task_id="read", read_file=file, dag=dag)
    dag.assign_job_dispatcher(dispatcher)

    cleanup = JobCleanup(task_id="cleanup",
                         outputs=dag.get_output_list(),
                         rm_files=[file],
                         dag=dag)
    dag.assign_job_cleanup(cleanup)

    globals()[dag_id] = dag
    return dag
Example 2
0
def make_dag(file):
    """Create a bam2bigwig CWLDAG from a JSON job description and publish it.

    Requires the job key "uid"; honours optional "start_day", "author"
    and "email" keys.  Registers the DAG in module globals() under its
    dag_id and returns it.
    """
    with open(file, 'r') as handle:
        job_spec = json.load(handle)

    if "uid" not in job_spec:
        raise Exception("UID must be part of the job")
    uid = job_spec["uid"]

    # Window start: explicit value from the job if present, otherwise
    # derived from the job file's creation timestamp (_delta is defined
    # at module level, outside this function).
    if "start_day" in job_spec:
        window_start = job_spec["start_day"]
    else:
        created = datetime.utcfromtimestamp(os.path.getctime(file))
        window_start = datetime.combine(created - timedelta(_delta + 1),
                                        datetime.min.time())

    window_end = datetime.combine(window_start + timedelta(_delta - 1),
                                  datetime.min.time())

    dag_id = uid + "_bam2bigwig"
    default_args = {
        'owner': job_spec['author'] if "author" in job_spec else 'SciDAP',
        'start_date': window_start,
        'email': [job_spec['email'] if "email" in job_spec
                  else '*****@*****.**'],
        'email_on_failure': False,
        'email_on_retry': False,
        'end_date': window_end,
        'on_failure_callback': fail_callback
    }

    dag = CWLDAG(
        dag_id=dag_id,
        cwl_workflow="workflows/scidap/bam-genomecov-bigwig.cwl",
        schedule_interval=timedelta(days=_delta),
        default_args=default_args)
    dag.create()

    reader = JobDispatcher(task_id="read", read_file=file, dag=dag)
    dag.assign_job_dispatcher(reader)

    finisher = JobCleanup(task_id="cleanup",
                          outputs=dag.get_output_list(),
                          rm_files=[file],
                          dag=dag)
    dag.assign_job_cleanup(finisher)

    globals()[dag_id] = dag
    return dag
Example 3
0
def make_dag(file):
    """Create and register a bam2bigwig CWLDAG described by a JSON job file.

    Parameters
    ----------
    file : str
        Path to a JSON job description.  Must contain "uid"; may contain
        "start_day", "author" and "email".

    Returns
    -------
    CWLDAG
        The created DAG; it is also stored in globals() under its dag_id.

    Raises
    ------
    Exception
        If the job description has no "uid" key.
    """
    with open(file, 'r') as f:
        job = json.load(f)

    if "uid" not in job:
        raise Exception("UID must be part of the job")

    uid = job["uid"]

    # BUG FIX: start_day and end_day were referenced in default_args below
    # but never defined in this variant, so calling it raised NameError.
    # Compute them the same way the sibling make_dag variants in this file
    # do: default the start to the job file's creation time shifted back
    # _delta+1 days (normalized to midnight), and end _delta-1 days after
    # the start (_delta is a module-level setting).
    if "start_day" in job:
        start_day = job["start_day"]
    else:
        start_day = datetime.combine(
            datetime.utcfromtimestamp(os.path.getctime(file)) -
            timedelta(_delta + 1), datetime.min.time())

    end_day = datetime.combine(start_day + timedelta(_delta - 1),
                               datetime.min.time())

    # Optional metadata with project defaults.
    owner = 'SciDAP'
    if "author" in job:
        owner = job['author']

    email = '*****@*****.**'
    if "email" in job:
        email = job['email']

    dag_id = uid + "_bam2bigwig"
    default_args = {
        'owner': owner,
        'start_date': start_day,
        'email': [email],
        'email_on_failure': False,
        'email_on_retry': False,
        'end_date': end_day,
        'on_failure_callback': fail_callback
    }

    # NOTE(review): unlike the sibling variants, this one passes no
    # schedule_interval to CWLDAG — preserved as-is; confirm intended.
    dag = CWLDAG(
        dag_id=dag_id,
        cwl_workflow="workflows/scidap/bam-genomecov-bigwig.cwl",
        default_args=default_args)
    dag.create()
    dag.assign_job_dispatcher(JobDispatcher(task_id="read", read_file=file, dag=dag))
    dag.assign_job_cleanup(JobCleanup(task_id="cleanup", outputs=dag.get_output_list(), rm_files=[file], dag=dag))
    globals()[dag_id] = dag
    return dag