Code example #1
def build_easyconfigs_in_parallel(build_command,
                                  easyconfigs,
                                  output_dir,
                                  robot_path=None):
    """
    easyconfigs is a list of easyconfigs which can be built (e.g. they have no unresolved dependencies)
    this function will build them in parallel by submitting jobs

    returns the jobs
    """
    _log.info("going to build these easyconfigs in parallel: %s", easyconfigs)
    job_module_dict = {}
    # dependencies have already been resolved,
    # so one can linearly walk over the list and use previous job IDs
    jobs = []

    # create a single connection, and reuse it
    conn = connect_to_server()
    if conn is None:
        _log.error("connect_to_server returned %s, can't submit jobs." %
                   (conn))

    # determine ppn once, and pass it to each job being created
    # this avoids having to figure out ppn over and over again, creating a temporary connection to the server each time
    ppn = get_ppn()

    for ec in easyconfigs:
        # This is very important, otherwise we might have race conditions:
        # e.g. GCC-4.5.3 finds a cloog.tar.gz that was incorrectly downloaded by GCC-4.6.3;
        # running this step here prevents this
        prepare_easyconfig(ec, robot_path=robot_path)

        # the new job will only depend on already submitted jobs
        _log.info("creating job for ec: %s" % str(ec))
        new_job = create_job(build_command, ec, output_dir, conn=conn, ppn=ppn)

        # Sometimes unresolvedDependencies will contain things that don't need to be built.
        job_deps = [
            job_module_dict[dep] for dep in ec['unresolvedDependencies']
            if dep in job_module_dict
        ]
        new_job.add_dependencies(job_deps)
        new_job.submit()
        _log.info("job for module %s has been submitted (job id: %s)" %
                  (new_job.module, new_job.jobid))

        # update dictionary
        job_module_dict[new_job.module] = new_job.jobid
        new_job.cleanup()
        jobs.append(new_job)

    disconnect_from_server(conn)

    return jobs
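A note on this first variant: submitted jobs are tracked in job_module_dict, keyed by module name, and ec['unresolvedDependencies'] is filtered against that dict, so dependencies that are already installed (and therefore never got a job) are simply skipped. A minimal, self-contained illustration of that filtering step, using made-up module names and job IDs rather than anything produced by the code above:

# hypothetical data, for illustration only
job_module_dict = {'GCC/4.7.2': '1234.master'}
unresolved = ['GCC/4.7.2', 'zlib/1.2.7']  # zlib assumed to be installed already, so no job was submitted for it
job_deps = [job_module_dict[dep] for dep in unresolved if dep in job_module_dict]
assert job_deps == ['1234.master']  # only the GCC job becomes a dependency of the new job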
Code example #2
def build_easyconfigs_in_parallel(build_command, easyconfigs, output_dir, robot_path=None):
    """
    easyconfigs is a list of easyconfigs which can be built (e.g. they have no unresolved dependencies)
    this function will build them in parallel by submitting jobs

    returns the jobs
    """
    _log.info("going to build these easyconfigs in parallel: %s", easyconfigs)
    job_ids = {}
    # dependencies have already been resolved,
    # so one can linearly walk over the list and use previous job IDs
    jobs = []

    # create a single connection, and reuse it
    conn = connect_to_server()
    if conn is None:
        _log.error("connect_to_server returned %s, can't submit jobs." % (conn))

    # determine ppn once, and pass it to each job being created
    # this avoids having to figure out ppn over and over again, creating a temporary connection to the server each time
    ppn = get_ppn()

    def tokey(dep):
        """Determine key for specified dependency."""
        return det_full_module_name(dep)

    for ec in easyconfigs:
        # This is very important, otherwise we might have race conditions:
        # e.g. GCC-4.5.3 finds a cloog.tar.gz that was incorrectly downloaded by GCC-4.6.3;
        # running this step here prevents this
        prepare_easyconfig(ec, robot_path=robot_path)

        # the new job will only depend on already submitted jobs
        _log.info("creating job for ec: %s" % str(ec))
        new_job = create_job(build_command, ec, output_dir, conn=conn, ppn=ppn)

        # sometimes unresolved_deps will contain things that don't need to be built
        job_deps = [job_ids[dep] for dep in map(tokey, ec['unresolved_deps']) if dep in job_ids]
        new_job.add_dependencies(job_deps)
        new_job.submit()
        _log.info("job for module %s has been submitted (job id: %s)" % (new_job.module, new_job.jobid))

        # update dictionary
        job_ids[new_job.module] = new_job.jobid
        new_job.cleanup()
        jobs.append(new_job)

    disconnect_from_server(conn)

    return jobs
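Compared to the first variant, this one keys its bookkeeping dict (job_ids) on the full module name returned by det_full_module_name, because ec['unresolved_deps'] now holds dependency specifications rather than ready-made module names. A rough sketch of that keying step; tokey below is a hypothetical stand-in for det_full_module_name, and the dependency dicts are made up:

def tokey(dep):
    # stand-in only: the real det_full_module_name applies the configured module naming scheme
    return '%(name)s/%(version)s' % dep

job_ids = {'GCC/4.7.2': '1234.master'}
unresolved_deps = [{'name': 'GCC', 'version': '4.7.2'}, {'name': 'zlib', 'version': '1.2.7'}]
job_deps = [job_ids[key] for key in map(tokey, unresolved_deps) if key in job_ids]
assert job_deps == ['1234.master']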
Code example #3
def build_easyconfigs_in_parallel(build_command, easyconfigs, output_dir=None, prepare_first=True):
    """
    easyconfigs is a list of easyconfigs which can be built (e.g. they have no unresolved dependencies)
    this function will build them in parallel by submitting jobs
    @param build_command: build command to use
    @param easyconfigs: list of easyconfig files
    @param output_dir: output directory
    returns the jobs
    """
    _log.info("going to build these easyconfigs in parallel: %s", easyconfigs)
    job_ids = {}
    # dependencies have already been resolved,
    # so one can linearly walk over the list and use previous job IDs
    jobs = []

    # create a single connection, and reuse it
    conn = connect_to_server()
    if conn is None:
        _log.error("connect_to_server returned %s, can't submit jobs." % (conn))

    # determine ppn once, and pass it to each job being created
    # this avoids having to figure out ppn over and over again, creating a temporary connection to the server each time
    ppn = get_ppn()

    def tokey(dep):
        """Determine key for specified dependency."""
        return ActiveMNS().det_full_module_name(dep)

    for ec in easyconfigs:
        # this is very important, otherwise we might have race conditions:
        # e.g. GCC-4.5.3 finds a cloog.tar.gz that was incorrectly downloaded by GCC-4.6.3;
        # running this step here prevents this
        if prepare_first:
            prepare_easyconfig(ec)

        # the new job will only depend on already submitted jobs
        _log.info("creating job for ec: %s" % str(ec))
        new_job = create_job(build_command, ec, output_dir=output_dir, conn=conn, ppn=ppn)

        # sometimes unresolved_deps will contain things that don't need to be built
        job_deps = [job_ids[dep] for dep in map(tokey, ec['unresolved_deps']) if dep in job_ids]
        new_job.add_dependencies(job_deps)

        # place user hold on job to prevent it from starting too quickly,
        # we might still need it in the queue to set it as a dependency for another job;
        # only set hold for job without dependencies, other jobs have a dependency hold set anyway
        with_hold = False
        if not job_deps:
            with_hold = True

        # actually (try to) submit job
        new_job.submit(with_hold)
        _log.info("job for module %s has been submitted (job id: %s)" % (new_job.module, new_job.jobid))

        # update dictionary
        job_ids[new_job.module] = new_job.jobid
        new_job.cleanup()
        jobs.append(new_job)

    # release all user holds on jobs after submission is completed
    for job in jobs:
        if job.has_holds():
            _log.info("releasing hold on job %s" % job.jobid)
            job.release_hold()

    disconnect_from_server(conn)

    return jobs
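The main additions in this variant: a prepare_first flag so source preparation can be skipped, module names resolved through ActiveMNS(), and a user hold placed on jobs that have no dependencies of their own. The hold keeps such a job queued until the whole batch has been submitted, so later jobs can still reference it as a dependency; jobs with dependencies are already held back by the scheduler's dependency mechanism. Once every job has been submitted, the final loop releases all user holds.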
Code example #4
def build_easyconfigs_in_parallel(build_command,
                                  easyconfigs,
                                  output_dir=None,
                                  prepare_first=True):
    """
    easyconfigs is a list of easyconfigs which can be built (e.g. they have no unresolved dependencies)
    this function will build them in parallel by submitting jobs
    @param build_command: build command to use
    @param easyconfigs: list of easyconfig files
    @param output_dir: output directory
    returns the jobs
    """
    _log.info("going to build these easyconfigs in parallel: %s", easyconfigs)
    job_ids = {}
    # dependencies have already been resolved,
    # so one can linearly walk over the list and use previous job IDs
    jobs = []

    # create a single connection, and reuse it
    conn = connect_to_server()
    if conn is None:
        raise EasyBuildError(
            "connect_to_server returned %s, can't submit jobs.", conn)

    # determine ppn once, and pass it to each job being created
    # this avoids having to figure out ppn over and over again, creating a temporary connection to the server each time
    ppn = get_ppn()

    def tokey(dep):
        """Determine key for specified dependency."""
        return ActiveMNS().det_full_module_name(dep)

    for ec in easyconfigs:
        # this is very important, otherwise we might have race conditions:
        # e.g. GCC-4.5.3 finds a cloog.tar.gz that was incorrectly downloaded by GCC-4.6.3;
        # running this step here prevents this
        if prepare_first:
            prepare_easyconfig(ec)

        # the new job will only depend on already submitted jobs
        _log.info("creating job for ec: %s" % str(ec))
        new_job = create_job(build_command,
                             ec,
                             output_dir=output_dir,
                             conn=conn,
                             ppn=ppn)

        # sometimes unresolved_deps will contain things that don't need to be built
        job_deps = [
            job_ids[dep] for dep in map(tokey, ec['unresolved_deps'])
            if dep in job_ids
        ]
        new_job.add_dependencies(job_deps)

        # place user hold on job to prevent it from starting too quickly,
        # we might still need it in the queue to set it as a dependency for another job;
        # only set hold for job without dependencies, other jobs have a dependency hold set anyway
        with_hold = False
        if not job_deps:
            with_hold = True

        # actually (try to) submit job
        new_job.submit(with_hold)
        _log.info("job for module %s has been submitted (job id: %s)" %
                  (new_job.module, new_job.jobid))

        # update dictionary
        job_ids[new_job.module] = new_job.jobid
        new_job.cleanup()
        jobs.append(new_job)

    # release all user holds on jobs after submission is completed
    for job in jobs:
        if job.has_holds():
            _log.info("releasing hold on job %s" % job.jobid)
            job.release_hold()

    disconnect_from_server(conn)

    return jobs
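This last variant has the same logic as the previous one, except that a failed connect_to_server now raises EasyBuildError instead of only logging an error. For context, a hypothetical call site; only build_easyconfigs_in_parallel, job.module and job.jobid come from the snippets above, while the command template format, the origin of the easyconfig list and the output directory are assumptions made for illustration:

# assumed: 'easyconfigs' is the result of dependency resolution, so every entry is buildable
# (possibly depending on other entries earlier in the list)
build_cmd = "eb %(spec)s --robot"  # assumed command template, expanded per easyconfig when the job is created
jobs = build_easyconfigs_in_parallel(build_cmd, easyconfigs, output_dir='/tmp/eb-jobs')
for job in jobs:
    print("submitted %s as job %s" % (job.module, job.jobid))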