Example #1
def handler(event, context):
    '''
    Triggered when a file upload to S3 completes; the event carries
    the file data.
    '''
    # print(event)  # uncomment to inspect the incoming S3 event

    input_json = make_input(event)
    extra_file_format = get_extra_file_format(event)
    status = get_status(event)
    if extra_file_format:
        # for an extra-file-triggered md5 run, the 'uploading' status check
        # below is skipped; only guard against files the workflow itself uploads.
        if status != 'to be uploaded by workflow':
            input_json['input_files'][0]['format_if_extra'] = extra_file_format
            response = run_workflow(workflow=WORKFLOW_NAME,
                                    input_json=input_json)
        else:
            # without this guard, 'response' would be unbound below
            return {'info': 'status is to be uploaded by workflow'}
    else:
        # only run if status is uploading...
        if status == 'uploading' or event.get('force_run'):
            # trigger the step function to run
            response = run_workflow(workflow=WORKFLOW_NAME,
                                    input_json=input_json)
        else:
            return {'info': 'status is not uploading'}

    # pop the non-JSON-serializable startDate (a datetime)
    response.pop('startDate')
    return response
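
The helpers this handler calls (make_input, get_status, get_extra_file_format, run_workflow) are defined elsewhere in tibanna. As a rough sketch only, get_extra_file_format might look like the following; the extension-to-format mapping is an assumption for illustration, not tibanna's actual rule:

# Hypothetical sketch -- the real helper lives elsewhere in tibanna.
def get_extra_file_format(event):
    """Return a format name if the uploaded key looks like an extra file
    (e.g. '.pairs.gz.px2' -> 'pairs_px2'), else None."""
    extra_formats = {'.px2': 'pairs_px2'}  # assumed mapping, for illustration
    key = event['Records'][0]['s3']['object']['key']  # standard S3 event shape
    for ext, fmt in extra_formats.items():
        if key.endswith(ext):
            return fmt
    return None
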
Example #2
def handler(event, context):
    '''
    Triggered when a file upload to S3 completes; the event carries
    the file data.
    '''
    # print(event)  # uncomment to inspect the incoming S3 event

    status = get_status(event)
    input_json = make_input(event)
    file_format, extra = get_file_format(event)
    if extra:  # the file is an extra file
        extra_status = get_status_for_extra_file(event, file_format)
        if status != 'to be uploaded by workflow':
            if not extra_status or extra_status != 'to be uploaded by workflow':
                input_json['input_files'][0]['format_if_extra'] = file_format
                response = run_workflow(sfn=TIBANNA_DEFAULT_STEP_FUNCTION_NAME,
                                        input_json=input_json)
            else:
                return {
                    'info':
                    'status for extra file is to be uploaded by workflow'
                }
        else:
            return {
                'info':
                'parent status for extra file is to be uploaded by workflow'
            }
    else:
        # only run if status is uploading...
        if status == 'uploading' or event.get('force_run'):
            # trigger the step function to run
            response = run_workflow(sfn=TIBANNA_DEFAULT_STEP_FUNCTION_NAME,
                                    input_json=input_json)
        else:
            return {'info': 'status is not uploading'}

    # run fastqc as a dependent of md5
    if file_format == 'fastq':
        md5_arn = response['_tibanna']['exec_arn']
        input_json_fastqc = make_input(event,
                                       'fastqc-0-11-4-1',
                                       dependency=[md5_arn],
                                       run_name_prefix='fastqc')
        response_fastqc = run_workflow(sfn=TIBANNA_DEFAULT_STEP_FUNCTION_NAME,
                                       input_json=input_json_fastqc)
        serialize_startdate(response_fastqc)
        response['fastqc'] = response_fastqc
    serialize_startdate(response)
    return response
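
serialize_startdate is called above but not shown. The location of the non-serializable datetime differs between these snippets (top-level startDate in Example #1, nested under _tibanna -> response in Example #3), so a defensive in-place sketch checks both; the exact nesting is an assumption:

# Sketch only: stringify startDate wherever these examples show it living.
def serialize_startdate(response):
    if 'startDate' in response:
        response['startDate'] = str(response['startDate'])
    tibanna_resp = response.get('_tibanna', {}).get('response', {})
    if 'startDate' in tibanna_resp:
        tibanna_resp['startDate'] = str(tibanna_resp['startDate'])
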
Example #3
def handler(event, context):
    env_name = event['env_name']
    sfn = event.get('step_function_name')
    if sfn:
        res = utils.run_workflow(event, env=env_name, sfn=sfn)
    else:
        res = utils.run_workflow(event, env=env_name)

    try:
        start = str(res['_tibanna']['response']['startDate'])
        res['_tibanna']['response']['startDate'] = start
    except (KeyError, TypeError):
        # response may not contain a startDate; leave it as-is
        pass

    return res
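
For a quick local check, the handler can be called directly; only env_name and the optional step_function_name are read here, and the whole event is passed through to utils.run_workflow. The values below are illustrative:

# Illustrative local invocation; field values are made up.
if __name__ == '__main__':
    event = {
        'env_name': 'fourfront-webdev',
        'step_function_name': 'tibanna_pony',  # optional; omit to use the default
    }
    print(handler(event, None))
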
Example #4
File: service.py  Project: hms-dbmi/tibanna
def run_wf(command):
    args = {
        "workflow": command,
        "input_json": '',
    }

    logger.info("in run_wf args are %s" % args)
    # set some defaults here
    if args['workflow'].startswith("hic_parta"):
        args['workflow'] = 'run_sbg_workflow_2'
        args['input_json'] = test_hic_data()
    else:
        return not_sure()

    run_data = run_workflow(**args)
    run_name = run_data['_tibanna']['run_name']
    run_url = run_data['_tibanna']['url']

    # make a witty response
    terms = ['take off', 'blast off', 'running']
    random_giphy = giphy(random.choice(terms))

    # return an attachment?
    slack_args = {
        'title': "workflow run %s started!" % run_name,
        'title_link': run_url,
        'text': "Your workflow is running. See status here %s" % run_url,
        'image_url': random_giphy,
    }

    response = {'attachments': [make_slack_attachment(**slack_args)]}
    return response
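
make_slack_attachment is not shown; since Slack's legacy attachment format is a plain dict, the helper may be little more than the following sketch (the color default is an assumption):

# Minimal sketch following Slack's legacy attachment schema.
def make_slack_attachment(title, title_link, text, image_url,
                          color='#36a64f'):  # assumed default color
    return {
        'title': title,
        'title_link': title_link,
        'text': text,
        'image_url': image_url,
        'color': color,
    }
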
Example #5
def testrun_md5(workflow_name='tibanna_pony', env='webdev'):
    """Creates a random file object with no md5sum/content_md5sum and run md5 workflow.
    It waits for 6 mintues till the workflow run finishes and checks the input file object
    has been updated.
    """
    bucket = "elasticbeanstalk-fourfront-" + env + "-wfoutput"
    ff_key = get_authentication_with_server(ff_env='fourfront-' + env)
    newfile = post_random_file(bucket, ff_key)
    uuid = newfile['uuid']
    accession = newfile['accession']
    input_json = {
        "config": {
            "ebs_type": "io1",
            "ebs_iops": 500,
            "s3_access_arn":
            "arn:aws:iam::643366669028:instance-profile/S3_access",
            "ami_id": "ami-cfb14bb5",
            "json_bucket": "4dn-aws-pipeline-run-json",
            "shutdown_min": 30,
            "copy_to_s3": True,
            "launch_instance": True,
            "log_bucket": "tibanna-output",
            "script_url":
            "https://raw.githubusercontent.com/4dn-dcic/tibanna/master/awsf/",
            "key_name": "4dn-encode",
            "password": ""
        },
        "_tibanna": {
            "env": "fourfront-webdev",
            "run_type": "md5"
        },
        "parameters": {},
        "app_name":
        "md5",
        "workflow_uuid":
        "c77a117b-9a58-477e-aaa5-291a109a99f6",
        "input_files": [{
            "workflow_argument_name": "input_file",
            "bucket_name": bucket,
            "uuid": uuid,
            "object_key": accession + '.pairs.gz'
        }],
        "output_bucket":
        bucket
    }
    resp = run_workflow(input_json, workflow=workflow_name)
    print(resp)

    # check result
    time.sleep(6 * 60)  # wait for 6 minutes
    filemeta = get_metadata(uuid, key=ff_key, add_on='?datastore=database')
    content_md5sum = filemeta.get('content_md5sum')
    md5sum = filemeta.get('md5sum')
    if content_md5sum and md5sum:
        print(content_md5sum)
        print(md5sum)
        patch_metadata({'status': 'deleted'}, uuid, key=ff_key)
    else:
        raise Exception('md5 step function run failed')
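
The fixed six-minute sleep makes the test timing-sensitive; a polling variant using the same get_metadata call the test already makes is sketched below (the 30-second interval and 10-minute timeout are arbitrary choices):

import time

# Polling alternative to the fixed sleep; interval/timeout are arbitrary.
def wait_for_md5(uuid, ff_key, timeout=600, interval=30):
    deadline = time.time() + timeout
    while time.time() < deadline:
        filemeta = get_metadata(uuid, key=ff_key, add_on='?datastore=database')
        if filemeta.get('md5sum') and filemeta.get('content_md5sum'):
            return filemeta
        time.sleep(interval)
    raise Exception('md5 step function run timed out')
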
Example #6
def testrun_md5_input_json_w_extra_file_object_name(env='webdev'):
    """Creates a random file object with no md5sum/content_md5sum and run md5 workflow.
    It waits for 6 mintues till the workflow run finishes and checks the input file object
    has been updated.
    """
    bucket = "elasticbeanstalk-fourfront-" + env + "-wfoutput"
    ff_key = get_authentication_with_server(ff_env='fourfront-' + env)
    newfile = post_random_file(bucket, ff_key)
    uuid = newfile['uuid']
    accession = newfile['accession']
    wf_uuid = "c77a117b-9a58-477e-aaa5-291a109a99f6"
    input_json = {
        "config": {
            "ebs_type": "io1",
            "ebs_iops": 500,
            "json_bucket": "4dn-aws-pipeline-run-json",
            "shutdown_min": 30,
            "log_bucket": "tibanna-output",
            "key_name": "4dn-encode",
            "password": ""
        },
        "_tibanna": {
            "env": "fourfront-webdev",
            "run_type": "md5_test_extra"
        },
        "parameters": {},
        "app_name":
        "md5",
        "workflow_uuid":
        wf_uuid,
        "input_files": [{
            "workflow_argument_name": "input_file",
            "bucket_name": bucket,
            "uuid": uuid,
            "object_key": accession + '.pairs.gz.px2',
            "format_if_extra": "pairs_px2"
        }],
        "output_bucket":
        bucket,
        "wfr_meta": {
            "notes": "extra file md5 trigger test from test_webdev.py"
        }
    }
    resp = run_workflow(input_json)
    print(resp)

    # check result
    time.sleep(6 * 60)  # wait for 6 minutes
    filemeta = get_metadata(uuid, key=ff_key, add_on='?datastore=database')
    content_md5sum = filemeta.get('extra_files')[0].get('content_md5sum')
    md5sum = filemeta.get('extra_files')[0].get('md5sum')
    file_size = filemeta.get('extra_files')[0].get('file_size')
    wfr_uuid = get_wfr_uuid(resp['_tibanna']['exec_arn'])
    wfr_meta = get_metadata(wfr_uuid, key=ff_key, add_on='?datastore=database')
    assert 'input_files' in wfr_meta
    assert 'format_if_extra' in wfr_meta['input_files'][0]
    assert md5sum
    assert content_md5sum
    assert file_size
    print(content_md5sum)
    print(md5sum)
    print(file_size)
    patch_metadata({'status': 'deleted'}, uuid, key=ff_key)
    patch_metadata({'status': 'deleted'}, wfr_uuid, key=ff_key)
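
get_wfr_uuid is assumed to map a step function execution ARN to the workflow-run item's uuid. One plausible sketch reads the finished execution's output via boto3; the ff_meta -> uuid path is a guess at where tibanna records it, not a confirmed field:

import json
import boto3

# Guesswork sketch: assumes the execution output JSON carries the
# workflow-run uuid under ff_meta -> uuid; the real helper may differ.
def get_wfr_uuid(exec_arn):
    sfn = boto3.client('stepfunctions')
    desc = sfn.describe_execution(executionArn=exec_arn)
    output = json.loads(desc['output'])
    return output['ff_meta']['uuid']
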