Example #1
def graph(args):
    data = load_infrabox_json(args.infrabox_json)
    jobs = get_job_list(data, args, base_path=args.project_root)

    d = 'digraph "%s" {' % args.project_name

    for j in jobs:
        parents = j['parents']

        if len(parents) == 0:
            continue

        parent_name = get_parent_name(parents)

        # Open one nested subgraph cluster per ancestor level
        for i in range(len(parents)):
            d += ' subgraph "cluster_%s" {\n' % get_parent_name(
                parents[0:i + 1])

        d += '   label="%s";\n' % parent_name.split('/')[-1]
        d += '   "%s";\n' % j['name']

        # Close every cluster opened above
        for _ in parents:
            d += ' }\n'

    # Emit one edge per declared dependency
    for j in jobs:
        for dep in j.get('depends_on', []):
            d += '"%s" -> "%s";\n' % (dep, j['name'])

    d += "}"

    with open(args.output, 'w') as out:
        out.write(d)
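
The helpers load_infrabox_json, get_job_list and get_parent_name are defined elsewhere in the CLI and are not shown here. Judging from the parent_name.split('/')[-1] call above, get_parent_name seems to join the parent chain with slashes; a minimal sketch of that assumption (not the actual InfraBox helper):

def get_parent_name(parents):
    # Hypothetical reconstruction: assumes the chain ['a', 'b'] becomes 'a/b',
    # which is consistent with the split('/')[-1] used for the cluster label.
    return '/'.join(parents)

With that reading, graph() opens one nested cluster_* subgraph per ancestor level, places the job's own node in the innermost cluster, and finally emits one edge per depends_on entry before writing the DOT text to args.output.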
Example #2
File: list_jobs.py Project: metti/cli
def list_jobs(args):
    args.project_root = os.path.abspath(args.project_root)
    infrabox_json_path = os.path.join(args.project_root, 'infrabox.json')
    if not os.path.isfile(infrabox_json_path):
        logger.error('%s does not exist' % infrabox_json_path)
        sys.exit(1)

    data = load_infrabox_json(args.infrabox_json)
    jobs = get_job_list(data, args, base_path=args.project_root)

    jobs.sort(key=lambda x: x['name'])
    for j in jobs:
        print(j['name'])
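
load_infrabox_json appears in every example of this section but is defined elsewhere in the CLI; the "validate infrabox.json" comments in the run() examples below suggest it does more than just parse the file. A minimal sketch, under the assumption that it only reads the file as JSON:

import json

def load_infrabox_json(path):
    # Hypothetical minimal version: parse and return the document as a dict.
    # The real CLI presumably also validates the contents.
    with open(path) as f:
        return json.load(f)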
Example #3
def list_jobs(args):
    args.project_root = os.path.abspath(args.project_root)
    infrabox_json_path = os.path.join(args.project_root, 'infrabox.json')
    if not os.path.isfile(infrabox_json_path):
        logger.error('%s does not exist' % infrabox_json_path)
        sys.exit(1)

    data = load_infrabox_json(args.infrabox_json)
    jobs = get_job_list(data, args, base_path=args.project_root)

    cache = WorkflowCache(args)
    cache.add_jobs(jobs)
    cache.print_tree()
Example #4
File: run.py Project: AlienCookie/cli
def run(args):
    # Init workflow cache
    cache = WorkflowCache(args)

    # validate infrabox.json
    data = load_infrabox_json(args.infrabox_json)
    jobs = get_job_list(data, args, infrabox_context=args.project_root)

    if not args.job_name:
        # We run all jobs, so clear all cached jobs
        cache.clear()

    # Cache all jobs
    cache.add_jobs(jobs)

    for j in cache.get_jobs(job_name=args.job_name, children=args.children):
        build_and_run(args, j, cache)
Example #5
File: run.py Project: metti/cli
def run(args):
    # validate infrabox.json
    data = load_infrabox_json(args.infrabox_json)
    jobs = get_job_list(data, args, base_path=args.project_root)

    # check if job name exists
    job = None
    if args.job_name:
        for j in jobs:
            if j['name'] == args.job_name:
                job = j
                break

        if not job:
            logger.error("job %s not found in infrabox.json" % args.job_name)
            sys.exit(1)

    if job:
        build_and_run(args, job)
    else:
        for j in jobs:
            build_and_run(args, j)
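
This variant selects the requested job with a linear scan over the job list. Purely as an illustration (not part of the CLI), the same lookup can be expressed with a dict keyed by job name; the sample jobs below are made up:

# Hypothetical data, only to demonstrate the lookup
jobs = [{'name': 'compile'}, {'name': 'test/unit'}]

jobs_by_name = {j['name']: j for j in jobs}
print(jobs_by_name.get('test/unit'))   # {'name': 'test/unit'}
print(jobs_by_name.get('missing'))     # None -> report "job not found" and exit 1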
Example #6
def run(args):
    # Init workflow cache
    cache = WorkflowCache(args)

    # validate infrabox.json
    data = load_infrabox_json(args.infrabox_json)
    jobs = get_job_list(data, args, base_path=args.project_root)

    if args.job_name:
        cache.add_jobs(jobs)
        job = cache.get_job(args.job_name)

        if not job:
            logger.error("job %s not found in infrabox.json" % args.job_name)
            sys.exit(1)

        build_and_run(args, job, cache)
    else:
        cache.clear()
        cache.add_jobs(jobs)
        for j in jobs:
            build_and_run(args, j, cache)
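
WorkflowCache is defined elsewhere in the CLI; across Examples #3, #4, #6 and #7 it is called with clear(), add_jobs(), add_job(), get_job(), get_jobs(job_name=..., children=...) and print_tree(). The sketch below is a purely illustrative in-memory stand-in for that interface: the real cache presumably persists between runs (otherwise clearing it before a full run, as in Example #4, would be pointless), and the children handling here is an assumption based on the parent-prefixed job names produced in Example #7.

class WorkflowCache(object):
    # Hypothetical in-memory stand-in, not the CLI's implementation.

    def __init__(self, args):
        self.args = args
        self._jobs = {}

    def clear(self):
        self._jobs = {}

    def add_job(self, job):
        self._jobs[job['name']] = job

    def add_jobs(self, jobs):
        for j in jobs:
            self.add_job(j)

    def get_job(self, name):
        return self._jobs.get(name)

    def get_jobs(self, job_name=None, children=False):
        if not job_name:
            return list(self._jobs.values())

        selected = []
        if job_name in self._jobs:
            selected.append(self._jobs[job_name])

        if children:
            # Assumption: child jobs are the ones whose name starts with
            # "<job_name>/", matching the naming scheme in Example #7.
            selected.extend(j for n, j in sorted(self._jobs.items())
                            if n.startswith(job_name + '/'))

        return selected

    def print_tree(self):
        # Flat listing instead of a real tree; enough to show the call site.
        for name in sorted(self._jobs):
            print(name)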
Example #7
File: run.py Project: AlienCookie/cli
def build_and_run(args, job, cache):
    check_if_supported(job)

    # check if dependency conditions are met
    for dep in job.get("depends_on", []):
        on = dep['on']
        parent = get_parent_job(dep['job'])

        if not parent:
            continue

        if parent['state'] not in on:
            logger.info('Skipping job %s' % job['name'])
            track_as_parent(job, 'skipped')
            return

    job_type = job['type']
    start_date = datetime.now()

    logger.info("Starting job %s" % job['name'])

    state = 'finished'

    try:
        if job_type == "docker-compose":
            build_and_run_docker_compose(args, job)
        elif job_type == "docker":
            build_and_run_docker(args, job)
        elif job_type == "wait":
            # do nothing
            pass
        else:
            logger.error("Unknown job type")
            sys.exit(1)
    except Exception as e:
        state = 'failure'
        traceback.print_exc(file=sys.stdout)
        logger.warn("Job failed: %s" % e)
        sys.exit(1)

    if not job.get('directories', None):
        return

    # Dynamic child jobs
    infrabox_json = os.path.join(job['directories']['output'], 'infrabox.json')

    jobs = []
    if os.path.exists(infrabox_json):
        logger.info("Loading generated jobs")

        data = load_infrabox_json(infrabox_json)
        jobs = get_job_list(data,
                            args,
                            infrabox_context=os.path.join(
                                args.project_root, '.infrabox', 'output'))

    end_date = datetime.now()

    track_as_parent(job, state, start_date, end_date)
    logger.info("Finished job %s" % job['name'])

    for j in jobs:
        # Prefix name with parent
        j['name'] = job['name'] + '/' + j['name']

        # Add dependencies to all root jobs
        if not j.get('depends_on', None):
            j['depends_on'] = [{"on": ["finished"], "job": job['name']}]
        else:
            dependencies = copy.deepcopy(j['depends_on'])

            for d in dependencies:
                d['job'] = job['name'] + '/' + d['job']

            j['depends_on'] = dependencies

        cache.add_job(j)

    if args.children:
        for j in jobs:
            build_and_run(args, j, cache)
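
The tail of build_and_run() is where dynamic child jobs are wired in: jobs generated into the parent's output directory are renamed under the parent and made to depend on it. A small self-contained illustration of that rewrite, using made-up job dicts rather than a real infrabox.json:

import copy

parent_name = 'generator'
children = [
    {'name': 'build'},
    {'name': 'test', 'depends_on': [{'on': ['finished'], 'job': 'build'}]},
]

for j in children:
    # Prefix the child's name with the parent job's name
    j['name'] = parent_name + '/' + j['name']

    if not j.get('depends_on', None):
        # Root children depend on the parent job itself
        j['depends_on'] = [{'on': ['finished'], 'job': parent_name}]
    else:
        # Dependencies between children are rewritten into the parent's namespace
        dependencies = copy.deepcopy(j['depends_on'])
        for d in dependencies:
            d['job'] = parent_name + '/' + d['job']
        j['depends_on'] = dependencies

# children is now:
#   generator/build  depends on  generator         (on: finished)
#   generator/test   depends on  generator/build   (on: finished)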