Example #1
def retrieve_and_rename(gi, hist, ORG_NAME):
    logging.info("Retrieving and Renaming %s", ORG_NAME)
    # Now we'll run this tool
    with xunit('galaxy', 'launch_tool') as tc3:
        logging.info("Running tool")
        inputs = {
            'org_source|source_select': 'direct',
            'org_source|org_raw': ORG_NAME,
        }
        tool_run = gi.tools.run_tool(hist['id'], 'edu.tamu.cpt2.webapollo.export', inputs)
    # Wait for the export job to finish

    with xunit('galaxy', 'watch_run') as tc4:
        (successful, msg) = watch_job_invocation(gi, tool_run['jobs'][0]['id'])

    rename_tcs = []
    logging.info("Run complete, renaming outputs")
    for dataset in tool_run['outputs']:
        if dataset['data_type'] == 'galaxy.datatypes.text.Json':
            name = '%s.json' % ORG_NAME
        elif dataset['data_type'] == 'galaxy.datatypes.sequence.Fasta':
            name = '%s.fasta' % ORG_NAME
        elif dataset['data_type'] == 'galaxy.datatypes.interval.Gff3':
            name = '%s.gff3' % ORG_NAME
        else:
            name = 'Unknown'
        logging.debug("Renaming %s (%s, %s) to %s", dataset['id'], dataset['data_type'], dataset['file_ext'], name)

        with xunit('galaxy', 'rename.%s' % dataset['file_ext']) as tmp_tc:
            # The export job was already watched above; just rename the output
            gi.histories.update_dataset(hist['id'], dataset['id'], name=name)

        rename_tcs.append(tmp_tc)

    ts = xunit_suite('Fetching ' + ORG_NAME, [tc3, tc4] + rename_tcs)
    return ts
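
These examples rely on a handful of helpers that are not shown: ``xunit``, ``xunit_suite``, ``xunit_dump``, and ``watch_job_invocation``. The sketch below shows what the xunit helpers might look like, assuming they wrap the junit_xml package; apart from the ``_tc`` attribute (which Example #3 pokes directly), the names here are illustrative rather than the project's actual code.

import time
import traceback
from contextlib import contextmanager

from junit_xml import TestCase, TestSuite


class _XUnitCase(object):
    # Thin wrapper exposing the underlying junit_xml TestCase as ``_tc``
    # (hypothetical name, inferred from ``test_case._tc.stdout`` in Example #3).
    def __init__(self, classname, name):
        self._tc = TestCase(name, classname=classname)


@contextmanager
def xunit(classname, name):
    # Run the wrapped block as a single test case; an exception is recorded
    # as an error rather than propagated (the real helper may re-raise).
    case = _XUnitCase(classname, name)
    start = time.time()
    try:
        yield case
    except Exception as e:
        case._tc.add_error_info(str(e), traceback.format_exc())
    finally:
        case._tc.elapsed_sec = time.time() - start


def xunit_suite(name, test_cases):
    # Bundle recorded cases into a junit_xml TestSuite.
    return TestSuite(name, [case._tc for case in test_cases])


def xunit_dump(test_suites):
    # Serialize one or more suites to an XUnit XML string.
    return TestSuite.to_xml_string(test_suites)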
Example #2
def __main__():
    parser = argparse.ArgumentParser(
        description=
        """Script to run all workflows mentioned in workflows_to_test.
    It will import the shared workflows are create histories for each workflow run, prefixed with ``TEST_RUN_<date>:``
    Make sure the yaml has file names identical to those in the data library."""
    )

    parser.add_argument(
        '-k',
        '--api-key',
        '--key',
        dest='key',
        metavar='your_api_key',
        help=
        'The account linked to this key needs to have admin rights to upload by server path',
        required=True)
    parser.add_argument(
        '-u',
        '--url',
        dest='url',
        metavar="http://galaxy_url:port",
        help="Be sure to specify the port on which galaxy is running",
        default="http://usegalaxy.org")
    parser.add_argument('-x',
                        '--xunit-output',
                        dest="xunit_output",
                        type=argparse.FileType('w'),
                        default='report.xml',
                        help="""Location to store xunit report in""")
    args = parser.parse_args()

    gi = galaxy.GalaxyInstance(args.url, args.key)
    wf = gi.workflows.get_workflows(workflow_id='7bfac6e726679b2c')[0]

    org_names = ('CCS', )

    test_suites = []
    wf_invocations = []
    for name in org_names:
        hist = gi.histories.create_history(
            name='BuildID=%s WF=Structural Org=%s Source=Jenkins' %
            (BUILD_ID, name))
        gi.histories.create_history_tag(hist['id'], 'Automated')
        gi.histories.create_history_tag(hist['id'], 'Annotation')
        gi.histories.create_history_tag(hist['id'], 'BICH464')
        # Load the datasets into history
        datasets, fetch_test_cases = retrieve_and_rename(gi, hist, name)
        ts = xunit_suite('[%s] Fetching Data' % name, fetch_test_cases)
        test_suites.append(ts)

        # TODO: fix mapping to always work.
        # Map our inputs for invocation
        inputs = {
            '0': {
                'id': datasets['fasta']['id'],
                'src': 'hda',
            },
            '1': {
                'id': datasets['json']['id'],
                'src': 'hda',
            }
        }

        # Invoke Workflow
        wf_test_cases, watchable_invocation = run_workflow(
            gi, wf, inputs, hist)
        # Give galaxy time to process
        time.sleep(10)
        # Invoke Workflow test cases
        ts = xunit_suite('[%s] Invoking workflow' % name, wf_test_cases)
        test_suites.append(ts)
        # Store the invocation info for watching later.
        wf_invocations.append(watchable_invocation)

    invoke_test_cases = []
    for (wf_id, invoke_id) in wf_invocations:
        with xunit('galaxy',
                   'workflow_watch.%s.%s' % (wf_id, invoke_id)) as tc_watch:
            logging.info("Waiting on wf %s invocation %s", wf_id, invoke_id)
            watch_workflow_invocation(gi, wf_id, invoke_id)
        invoke_test_cases.append(tc_watch)
    ts = xunit_suite('Workflow Completion', invoke_test_cases)
    test_suites.append(ts)
    args.xunit_output.write(xunit_dump(test_suites))
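
The ``watch_workflow_invocation`` helper used above is likewise not shown. A minimal polling sketch against BioBlend's ``show_invocation`` call follows; the state names checked and the poll interval are assumptions, not the project's actual implementation.

import time


def watch_workflow_invocation(gi, wf_id, invoke_id, poll_seconds=10):
    # Poll until Galaxy finishes scheduling the invocation; raise on failure
    # so the enclosing xunit block records the test case as errored.
    while True:
        invocation = gi.workflows.show_invocation(wf_id, invoke_id)
        state = invocation['state']
        if state in ('failed', 'cancelled'):
            raise Exception('Invocation %s ended in state %s' % (invoke_id, state))
        if state == 'scheduled':
            return invocation
        time.sleep(poll_seconds)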
Example #3
def cli(ctx, _):
    """xargs look-alike that wraps output calls as XUnit XML

    e.g.::

        parsec histories get_histories | \
            jq '.[].id' -r | \
            head -n 3 | \
            parsec utils xunit_xargs parsec histories get_status \| jq .percent_complete

    will fetch the first three histories mentioned, and then pass them to xargs
    to run ``parsec histories get_status [history_id] | jq .percent_complete``. This will
    in turn produce XUnit XML that can be used in Jenkins or similar systems::

        <?xml version="1.0" ?>
        <testsuites errors="0" failures="0" tests="3" time="1.5944418907165527">
                <testsuite errors="0" failures="0" name="Parsec XX" skipped="0" tests="3" time="1.5944418907165527">
                        <testcase classname="parsec.histories.get_status.769f01a3981796db_|.jq..percent_complete" name="parsec.histories.get_status.769f01a3981796db_" time="0.604831">
                                <system-out>100</system-out>
                        </testcase>
                        <testcase classname="parsec.histories.get_status.83fbc32772cb5fcf_|.jq..percent_complete" name="parsec.histories.get_status.83fbc32772cb5fcf_" time="0.483556">
                                <system-out>100</system-out>
                        </testcase>
                        <testcase classname="parsec.histories.get_status.90c9282cb8718062_|.jq..percent_complete" name="parsec.histories.get_status.90c9282cb8718062_" time="0.506056">
                                <system-out>97.82608695652173</system-out>
                        </testcase>
                </testsuite>
        </testsuites>
    """
    test_cases = []
    for line in sys.stdin:
        # Pretend to be xargs. Re-read argv on every line, because the list
        # may be mutated below when splicing in the piped value.
        piped_commands = sys.argv[sys.argv.index('xunit_xargs') + 1:]
        if '|' in piped_commands:
            pipe_idx = piped_commands.index('|')
            piped_commands[pipe_idx] = line.strip() + ' |'
            built_command = piped_commands
        else:
            built_command = piped_commands + [line.strip()]
        # TODO: detect spaces in args and warn that they should be quoted.
        # If they provide multiple strings, then pipe them together

        xunit_identifier = '.'.join(
            [x.strip().replace(' ', '_') for x in piped_commands])
        if '|' in xunit_identifier:
            xunit_name = xunit_identifier[0:xunit_identifier.index('|')]
        else:
            xunit_name = xunit_identifier

        stderr = tempfile.NamedTemporaryFile()
        output = ""
        with xunit(xunit_name, xunit_identifier) as test_case:
            ctx.vlog('Executing: %s', ' '.join(built_command))
            output = check_output(' '.join(built_command),
                                  shell=True,
                                  stderr=stderr)

        # Attach captured stdout and stderr to the test case
        stderr.seek(0)
        test_case._tc.stdout = unicodify(output).strip()
        test_case._tc.stderr = unicodify(stderr.read()).strip()
        # Append to list
        test_cases.append(test_case)

    ts = xunit_suite('Parsec XX', test_cases)
    print(xunit_dump([ts]))
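
Note the stream handling in ``cli``: ``check_output`` only captures stdout, so stderr is redirected to a ``NamedTemporaryFile`` and read back after the block, letting both streams be attached to the test case. ``output`` is initialized to an empty string beforehand so the attribute assignments still succeed if the command fails inside the ``xunit`` block (assuming the helper records rather than re-raises the failure).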
def __main__():
    parser = argparse.ArgumentParser(
        description=
        """Script to run all workflows mentioned in workflows_to_test.
    It will import the shared workflows are create histories for each workflow run, prefixed with ``TEST_RUN_<date>:``
    Make sure the yaml has file names identical to those in the data library."""
    )

    parser.add_argument(
        '-k',
        '--api-key',
        '--key',
        dest='key',
        metavar='your_api_key',
        help=
        'The account linked to this key needs to have admin rights to upload by server path',
        required=True)
    parser.add_argument(
        '-u',
        '--url',
        dest='url',
        metavar="http://galaxy_url:port",
        help="Be sure to specify the port on which galaxy is running",
        default="http://usegalaxy.org")
    parser.add_argument('-x',
                        '--xunit-output',
                        dest="xunit_output",
                        type=argparse.FileType('w'),
                        default='report.xml',
                        help="""Location to store xunit report in""")
    args = parser.parse_args()

    gi = galaxy.GalaxyInstance(args.url, args.key)
    wf = gi.workflows.get_workflows(workflow_id='95c345e5129ac7f2')[0]

    org_names = ('Soft', '2ww-3119', 'ISA', 'Inf_Still_Creek', 'J76', 'K6',
                 'K7', 'K8', 'MIS1-LT2', 'MIS3-3117', 'MP16', 'Pin', 'SCI',
                 'SCS', 'SL-Ken', 'ScaAbd', 'ScaApp', 'Sw1_3003', 'Sw2-Ken',
                 'UDP', '5ww_LT2', 'Sw2-Np2', 'CCS')

    wf_data = gi.workflows.show_workflow(wf['id'])
    wf_inputs = wf_data['inputs']
    test_suites = []
    wf_invocations = []
    for name in org_names:
        try:
            hist = gi.histories.create_history(
                name='BuildID=%s WF=%s Org=%s Source=Jenkins' %
                (BUILD_ID, wf_data['name'].replace(' ', '_'), name))
            gi.histories.create_history_tag(hist['id'], 'Automated')
            gi.histories.create_history_tag(hist['id'], 'Annotation')
            gi.histories.create_history_tag(hist['id'], 'BICH464')
            # Load the datasets into history
            files = glob.glob('tmp/%s*' % name)
            for f in sorted(files):
                # Skip blastxml
                if '.NR.blastxml' in f:
                    continue
                gi.tools.upload_file(f, hist['id'])

            datasets = gi.histories.show_history(hist['id'], contents=True)
            datasetMap = {
                dataset['name'].replace(name + '.', ''): dataset['id']
                for dataset in datasets
            }

            logging.debug("Dataset map for %s: %s", name, datasetMap)

            # TODO: fix mapping to always work.
            # Map our inputs for invocation
            inputs = {
                '0': {
                    'id': datasetMap['fa'],
                    'src': 'hda',
                },
                '1': {
                    'id': datasetMap['gff3'],
                    'src': 'hda',
                },
                '2': {
                    'id': datasetMap['NT.blastxml'],
                    'src': 'hda',
                },
                '3': {
                    'id': datasetMap['NR.tsv'],
                    'src': 'hda',
                },
                '4': {
                    'id': datasetMap['PG.tsv'],
                    'src': 'hda',
                },
            }

            # Invoke Workflow
            wf_test_cases, watchable_invocation = run_workflow(
                gi, wf, inputs, hist)
            # Invoke Workflow test cases
            ts = xunit_suite('[%s] Invoking workflow' % name, wf_test_cases)
            test_suites.append(ts)

            # Store the invocation info for watching later.
            wf_invocations.append(watchable_invocation)
        except Exception:
            logging.exception("Workflow launch failed for %s; continuing", name)

    invoke_test_cases = []
    for (wf_id, invoke_id) in wf_invocations:
        with xunit('galaxy',
                   'workflow_watch.%s.%s' % (wf_id, invoke_id)) as tc_watch:
            logging.info("Waiting on wf %s invocation %s", wf_id, invoke_id)
            watch_workflow_invocation(gi, wf_id, invoke_id)
        invoke_test_cases.append(tc_watch)
    ts = xunit_suite('Workflow Completion', invoke_test_cases)
    test_suites.append(ts)
    args.xunit_output.write(xunit_dump(test_suites))
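
Both ``__main__`` variants also depend on a ``run_workflow`` helper that returns a list of test cases plus a watchable ``(workflow_id, invocation_id)`` pair. A minimal sketch using BioBlend's ``invoke_workflow`` follows; the xunit label and the exact return shape are inferred from the call sites above, not taken from the project.

def run_workflow(gi, wf, inputs, hist):
    # Launch the workflow inside an xunit block so a failed invocation shows
    # up as an errored test case instead of an unhandled exception. If the
    # launch fails, the returned invocation id is None.
    invocation = {'id': None}
    with xunit('galaxy', 'workflow_launch.%s' % wf['id']) as tc:
        invocation = gi.workflows.invoke_workflow(
            wf['id'], inputs=inputs, history_id=hist['id'])
    return [tc], (wf['id'], invocation['id'])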