def main(argv=None):
    """Entry point: build a test workflow and run it concurrently.

    Parses command-line options, generates a workflow description,
    imports it into the target Galaxy instance, and launches
    ``--workflow_count`` concurrent runs in daemon threads, waiting for
    all of them to finish.

    :param argv: optional argument list (defaults to ``sys.argv[1:]``
        via ``argparse``).
    :raises Exception: if ``argparse`` is unavailable (pre-Python 2.7).
    """
    if ArgumentParser is None:
        # argparse only ships with Python >= 2.7; the import at module
        # level presumably falls back to None on older interpreters.
        raise Exception("Test requires Python 2.7")
    arg_parser = ArgumentParser(description=DESCRIPTION)
    arg_parser.add_argument("--api_key", default="testmasterapikey")
    arg_parser.add_argument("--host", default="http://localhost:8080/")

    arg_parser.add_argument("--collection_size", type=int, default=20)
    arg_parser.add_argument("--workflow_depth", type=int, default=10)
    arg_parser.add_argument("--two_outputs", default=False, action="store_true")
    arg_parser.add_argument("--workflow_count", type=int, default=1)

    args = arg_parser.parse_args(argv)
    uuid = str(uuid4())
    workflow_struct = _workflow_struct(args, uuid)
    gi = _gi(args)

    workflow = yaml_to_workflow.python_to_workflow(workflow_struct)
    workflow_info = gi.workflows.import_workflow_json(workflow)
    workflow_id = workflow_info["id"]

    # Every thread runs the same imported workflow; bind the shared
    # arguments once instead of re-passing them per thread.
    target = functools.partial(_run, args, gi, workflow_id, uuid)
    threads = []
    for _ in range(args.workflow_count):
        t = Thread(target=target)
        # Daemon threads: an interrupted interpreter exit is not blocked
        # by in-flight workflow runs.
        t.daemon = True
        t.start()
        threads.append(t)

    for t in threads:
        t.join()
def main(argv=None):
    """Import a generated test workflow and launch concurrent runs.

    Builds the argument parser, produces the workflow structure,
    imports it into Galaxy, then fans out ``--workflow_count`` runs on
    daemon threads and joins them all.
    """
    if ArgumentParser is None:
        raise Exception("Test requires Python 2.7")

    parser = ArgumentParser(description=DESCRIPTION)
    parser.add_argument("--api_key", default="testmasterapikey")
    parser.add_argument("--host", default="http://localhost:8080/")

    parser.add_argument("--collection_size", type=int, default=20)
    parser.add_argument("--workflow_depth", type=int, default=10)
    parser.add_argument("--two_outputs", default=False, action="store_true")
    parser.add_argument("--workflow_count", type=int, default=1)

    options = parser.parse_args(argv)

    run_uuid = str(uuid4())
    struct = _workflow_struct(options, run_uuid)
    galaxy = _gi(options)

    imported = galaxy.workflows.import_workflow_json(
        yaml_to_workflow.python_to_workflow(struct)
    )

    # One callable shared by every worker thread.
    launch = functools.partial(_run, options, galaxy, imported["id"], run_uuid)

    workers = [Thread(target=launch) for _ in range(options.workflow_count)]
    for worker in workers:
        worker.daemon = True
        worker.start()

    for worker in workers:
        worker.join()
# Example #3
# 0
def main(argv=None):
    """Entry point for workflow driving.

    Parses command-line options, generates a workflow description,
    imports it into the target Galaxy instance, and launches
    ``--workflow_count`` concurrent runs in daemon threads, waiting for
    all of them to finish.

    :param argv: optional argument list (defaults to ``sys.argv[1:]``
        via ``argparse``).
    """
    arg_parser = ArgumentParser(description=DESCRIPTION)
    arg_parser.add_argument("--api_key", default="testmasterapikey")
    arg_parser.add_argument("--host", default="http://localhost:8080/")

    arg_parser.add_argument("--collection_size", type=int, default=20)

    arg_parser.add_argument("--schedule_only_test", default=False, action="store_true")
    arg_parser.add_argument("--workflow_depth", type=int, default=10)
    arg_parser.add_argument("--workflow_count", type=int, default=1)

    # The two workflow shapes are alternatives; argparse rejects both.
    group = arg_parser.add_mutually_exclusive_group()
    group.add_argument("--two_outputs", default=False, action="store_true")
    group.add_argument("--wave_simple", default=False, action="store_true")

    args = arg_parser.parse_args(argv)

    uuid = str(uuid4())
    workflow_struct = _workflow_struct(args, uuid)

    # Only pass the uuid along if the workflow actually declares an
    # input collection step; otherwise there is nothing to bind it to.
    # (Generator instead of a materialized list — any() short-circuits.)
    has_input = any(s.get("type", "tool") == "input_collection" for s in workflow_struct)
    if not has_input:
        uuid = None

    gi = _gi(args)

    workflow = yaml_to_workflow.python_to_workflow(workflow_struct)
    workflow_info = gi.workflows.import_workflow_json(workflow)
    workflow_id = workflow_info["id"]

    # Every thread runs the same imported workflow; bind the shared
    # arguments once.
    target = functools.partial(_run, args, gi, workflow_id, uuid)
    threads = []
    for _ in range(args.workflow_count):
        t = Thread(target=target)
        # Daemon threads: interpreter exit is not blocked by in-flight runs.
        t.daemon = True
        t.start()
        threads.append(t)

    for t in threads:
        t.join()
def main(argv=None):
    """Entry point for workflow driving."""
    parser = ArgumentParser(description=DESCRIPTION)
    parser.add_argument("--api_key", default="testmasterapikey")
    parser.add_argument("--host", default="http://localhost:8080/")

    parser.add_argument("--collection_size", type=int, default=20)

    parser.add_argument("--schedule_only_test", default=False, action="store_true")
    parser.add_argument("--workflow_depth", type=int, default=10)
    parser.add_argument("--workflow_count", type=int, default=1)

    # Mutually exclusive workflow shapes.
    shape = parser.add_mutually_exclusive_group()
    shape.add_argument("--two_outputs", default=False, action="store_true")
    shape.add_argument("--wave_simple", default=False, action="store_true")

    options = parser.parse_args(argv)

    run_uuid = str(uuid4())
    struct = _workflow_struct(options, run_uuid)

    # Drop the uuid when no step is an input collection.
    if not any([s.get("type", "tool") == "input_collection" for s in struct]):
        run_uuid = None

    galaxy = _gi(options)

    imported = galaxy.workflows.import_workflow_json(
        yaml_to_workflow.python_to_workflow(struct)
    )

    # Shared callable for all worker threads.
    launch = functools.partial(_run, options, galaxy, imported["id"], run_uuid)

    workers = [Thread(target=launch) for _ in range(options.workflow_count)]
    for worker in workers:
        worker.daemon = True
        worker.start()

    for worker in workers:
        worker.join()