Example #1
def trigger_if_necessary(da, pkg, rosdistro, jenkins_instance, missing_by_arch):
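    """Trigger the Jenkins sourcedeb/binarydeb job for ``pkg`` on arch ``da``.

    Returns True if a build was triggered, and False when the trigger would be
    redundant: the sourcedeb job will cascade to the binarydebs, the job is
    already running or queued, or an upstream job is also being triggered.
    """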
    if da != 'source' and 'source' in missing_by_arch and pkg in missing_by_arch['source']:
        print("  Skipping trigger of binarydeb job for package '%s' on arch '%s' as the sourcedeb job will trigger them automatically" % (pkg, da))
        return False

    if da == 'source':
        job_name = '%s_sourcedeb' % (debianize_package_name(rosdistro, pkg))
    else:
        job_name = '%s_binarydeb_%s' % (debianize_package_name(rosdistro, pkg), da)
    job_info = jenkins_instance.get_job_info(job_name)

    if 'color' in job_info and 'anime' in job_info['color']:
        print("  Skipping trigger of job %s because it's already running" % job_name)
        return False

    if 'inQueue' in job_info and job_info['inQueue']:
        print("  Skipping trigger of job '%s' because it's already queued" % job_name)
        return False

    if da != 'source' and 'upstreamProjects' in job_info:
        upstream = job_info['upstreamProjects']
        for p in missing_by_arch[da]:
            p_name = '%s_binarydeb_%s' % (debianize_package_name(rosdistro, p), da)
            for u in upstream:
                if u['name'] == p_name:
                    print("  Skipping trigger of job '%s' because the upstream job '%s' is also triggered" % (job_name, p_name))
                    return False

    print("Triggering '%s'" % job_name)
    jenkins_instance.build_job(job_name)
    return True
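A minimal usage sketch, not from the original source: the server URL, credentials, distro name, and the contents of missing_by_arch are placeholders, and debianize_package_name is assumed to be importable from the same module that defines trigger_if_necessary.

import jenkins  # python-jenkins

jenkins_instance = jenkins.Jenkins('http://jenkins.example.com', 'user', 'api_token')
# packages whose sourcedeb/binarydeb jobs are still missing, keyed by arch
missing_by_arch = {
    'source': ['roscpp'],
    'amd64': ['roscpp', 'std_msgs'],
}
for arch, pkgs in missing_by_arch.items():
    for pkg in pkgs:
        trigger_if_necessary(arch, pkg, 'groovy', jenkins_instance, missing_by_arch)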
Example #2
def get_dependencies(rd, packages):
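    """Return a dict mapping each package's debianized name to the debianized
    names of its non-system build and run dependencies."""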
    dependencies = {}
    v = rospack.init_rospack_interface()
    for p in packages:
        deps = rd.get_depends(p)
        dp = debianize_package_name(rd.name, p)
        dependencies[dp] = []
        combined_deps = set(deps['build']) | set(deps['run'])
        for d in combined_deps:
            if not rospack.is_system_dependency(v, d):
                dependencies[dp].append(debianize_package_name(rd.name, d))
    return dependencies
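A hypothetical illustration of the returned structure; the package names are made up and it assumes debianize_package_name produces 'ros-<rosdistro>-<name>' style names:

# get_dependencies(rd, ['roscpp', 'std_msgs']) could return something like:
# {
#     'ros-groovy-roscpp': ['ros-groovy-std-msgs'],
#     'ros-groovy-std-msgs': [],
# }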
Example #3
def trigger_if_necessary(da, pkg, rosdistro, jenkins_instance, missing_by_arch):
    if da != 'source' and 'source' in missing_by_arch and pkg in missing_by_arch['source']:
        print("  Skipping trigger of binarydeb job for package '%s' on arch '%s' as the sourcedeb job will trigger them automatically" % (pkg, da))
        return False

    if da == 'source':
        job_name = '%s_sourcedeb' % (debianize_package_name(rosdistro, pkg))
    else:
        job_name = '%s_binarydeb_%s' % (debianize_package_name(rosdistro, pkg), da)
    job_info = jenkins_instance.get_job_info(job_name)

    if 'color' in job_info and 'anime' in job_info['color']:
        print("  Skipping trigger of job %s because it's already running" % job_name)
        return False

    if 'inQueue' in job_info and job_info['inQueue']:
        print("  Skipping trigger of job '%s' because it's already queued" % job_name)
        return False

    if da != 'source' and 'upstreamProjects' in job_info:
        upstream = job_info['upstreamProjects']
        for p in missing_by_arch[da]:
            p_name = '%s_binarydeb_%s' % (debianize_package_name(rosdistro, p), da)
            for u in upstream:
                if u['name'] == p_name:
                    print("  Skipping trigger of job '%s' because the upstream job '%s' is also triggered" % (job_name, p_name))
                    return False

    print("Triggering '%s'" % job_name)
    #return jenkins_instance.build_job(job_name)
    # replicate internal implementation of Jenkins.build_job()
    import urllib2
    if not jenkins_instance.job_exists(job_name):
        raise jenkins.JenkinsException('no such job[%s]' % (job_name))
    # pass parameters to create a POST request instead of GET
    return jenkins_instance.jenkins_open(urllib2.Request(jenkins_instance.build_job_url(job_name), 'foo=bar'))
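The closing lines replicate the internals of Jenkins.build_job() and attach a dummy 'foo=bar' body so that urllib2 sends the trigger as a POST instead of a GET. On a sufficiently recent python-jenkins release the commented-out call should suffice, since the client issues the trigger as a POST itself; a one-line sketch under that assumption, reusing the names from the snippet:

# recent python-jenkins clients send the build trigger as a POST themselves,
# so the urllib2 workaround above is only needed with old client versions
jenkins_instance.build_job(job_name)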
Example #4
def doit(job_params, dry_maintainers, packages, rosdist_rep,
         wet_only=False, commit=False, delete_extra_jobs=False, whitelist_repos=None):
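    """Configure the release jobs for one rosdistro: per-package
    sourcedeb/binarydeb jobs for the wet repositories, per-stack dry jobs,
    the special metapackages job, and (optionally) deletion of Jenkins jobs
    that are no longer configured."""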

    jenkins_instance = None
    if commit or delete_extra_jobs:
        jenkins_config = jenkins_support.load_server_config_file(jenkins_support.get_default_catkin_debs_config())
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(jenkins_config)

    rosdistro = job_params.rosdistro
    rd = job_params.rd

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}

    for repo_name in sorted(rd.get_repositories()):
        if whitelist_repos and repo_name not in whitelist_repos:
            continue

        r = rd.get_repository(repo_name)

        print('Configuring WET repo "%s" at "%s" for "%s"' % (r.name, r.url, job_params.distros))
        p_list = [p.name for p in r.packages]
        for p in sorted(p_list):
            if not r.version:
                print('- skipping "%s" since version is null' % p)
                continue
            pkg_name = debianize_package_name(rosdistro, p)
            maintainers = rd.get_maintainers(p)
            pp = PackageParams(package_name=pkg_name,
                               package=packages[p],
                               release_uri=r.url,
                               short_package_name=p,
                               maintainers=maintainers)

            results[pkg_name] = release_jobs.doit(job_params=job_params,
                                                  pkg_params=pp,
                                                  commit=commit,
                                                  jenkins_instance=jenkins_instance)
            #time.sleep(1)
            #print ('individual results', results[pkg_name])

    if wet_only:
        print("wet only selected, skipping dry and delete")
        return results

    default_distros = job_params.distros
    target_arches = list(set([x for d in default_distros for x in job_params.arches[d]]))
    rosdistro = job_params.rosdistro
    jobs_graph = job_params.jobgraph

    if rosdistro == 'backports':
        print("No dry backports support")
        return results

    if rosdistro == 'fuerte':
        packages_for_sync = 300
    elif rosdistro == 'groovy':
        packages_for_sync = 500
    elif rosdistro == 'hydro':
        packages_for_sync = 60
    else:
        packages_for_sync = 10000

    #dry stacks
    # dry dependencies
    d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

    for s in sorted(d.stacks.iterkeys()):
        if whitelist_repos and s not in whitelist_repos:
            continue
        print("Configuring DRY job [%s]" % s)
        if not d.stacks[s].version:
            print('- skipping "%s" since version is null' % s)
            continue
        results[debianize_package_name(rd.name, s)] = release_jobs.dry_doit(s, dry_maintainers[s], default_distros, target_arches, rosdistro, jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance, packages_for_sync=packages_for_sync)
        #time.sleep(1)

    # special metapackages job
    if not whitelist_repos or 'metapackages' in whitelist_repos:
        results[debianize_package_name(rd.name, 'metapackages')] = release_jobs.dry_doit('metapackages', [], default_distros, target_arches, rosdistro, jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance, packages_for_sync=packages_for_sync)

    if delete_extra_jobs:
        assert(not whitelist_repos)
        # clean up extra jobs
        configured_jobs = set()

        for jobs in results.values():
            release_jobs.summarize_results(*jobs)
            for e in jobs:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_instance.get_jobs()])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [j for j in relevant_jobs if rosdistro in j and ('_sourcedeb' in j or '_binarydeb' in j)]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
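The delete_extra_jobs branch finds stale jobs by plain set arithmetic: any job that exists on the Jenkins server, was not configured in this run, and matches the rosdistro's sourcedeb/binarydeb naming is treated as extra. A toy illustration with made-up job names:

existing_jobs = {'ros-groovy-roscpp_sourcedeb',
                 'ros-groovy-roscpp_binarydeb_precise_amd64',
                 'ros-groovy-removed-pkg_sourcedeb',
                 'ros-groovy-roscpp_devel'}
configured_jobs = {'ros-groovy-roscpp_sourcedeb',
                   'ros-groovy-roscpp_binarydeb_precise_amd64'}
extra = [j for j in existing_jobs - configured_jobs
         if 'groovy' in j and ('_sourcedeb' in j or '_binarydeb' in j)]
# extra == ['ros-groovy-removed-pkg_sourcedeb']; the _devel job is untouched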
Example #5
    # even for wet_only the dry packages need to be considered, else they are not added as downstream dependencies for the wet jobs
    if not args.wet_only:
        stack_depends, dry_maintainers = release_jobs.dry_get_stack_dependencies(args.rosdistro)
        dry_jobgraph = release_jobs.dry_generate_jobgraph(args.rosdistro, dependencies, stack_depends)
    else:
        dry_maintainers = []

    combined_jobgraph = {}
    for k, v in dependencies.iteritems():
        combined_jobgraph[k] = v
    if not args.wet_only:
        for k, v in dry_jobgraph.iteritems():
            combined_jobgraph[k] = v

    # setup a job triggered by all other debjobs
    combined_jobgraph[debianize_package_name(args.rosdistro, 'metapackages')] = combined_jobgraph.keys()

    targets = get_targets(rd, args.distros, args.arches)
    jp = JobParams(rosdistro=args.rosdistro,
                   distros=targets.keys(),
                   arches=targets,
                   fqdn=args.fqdn,
                   jobgraph=combined_jobgraph,
                   rosdist_rep=args.rosdist_rep,
                   rd_object=rd)

    results_map = doit(job_params=jp,
                       packages=packages,
                       dry_maintainers=dry_maintainers,
                       commit=args.commit,
                       wet_only=args.wet_only,
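For illustration, the combined job graph is a plain dict mapping each debianized package name to the names it depends on; the assignment above makes the metapackages job depend on, and therefore be triggered downstream of, every other deb job. The entries below are made up:

combined_jobgraph = {
    'ros-groovy-roscpp': ['ros-groovy-std-msgs'],
    'ros-groovy-std-msgs': [],
}
combined_jobgraph['ros-groovy-metapackages'] = list(combined_jobgraph.keys())
# -> depends on ['ros-groovy-roscpp', 'ros-groovy-std-msgs']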
Example #6
def doit(job_params,
         dry_maintainers,
         packages,
         rosdist_rep,
         wet_only=False,
         commit=False,
         delete_extra_jobs=False,
         whitelist_repos=None):

    jenkins_instance = None
    if commit or delete_extra_jobs:
        jenkins_config = jenkins_support.load_server_config_file(
            jenkins_support.get_default_catkin_debs_config())
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
            jenkins_config)

    rosdistro = job_params.rosdistro
    rd = job_params.rd

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}

    for repo_name in sorted(rd.get_repositories()):
        if whitelist_repos and repo_name not in whitelist_repos:
            continue

        r = rd.get_repository(repo_name)

        print('Configuring WET repo "%s" at "%s" for "%s"' %
              (r.name, r.url, job_params.distros))
        p_list = [p.name for p in r.packages]
        for p in sorted(p_list):
            if not r.version:
                print('- skipping "%s" since version is null' % p)
                continue
            pkg_name = debianize_package_name(rosdistro, p)
            maintainers = rd.get_maintainers(p)
            pp = PackageParams(package_name=pkg_name,
                               package=packages[p],
                               release_uri=r.url,
                               short_package_name=p,
                               maintainers=maintainers)

            results[pkg_name] = release_jobs.doit(
                job_params=job_params,
                pkg_params=pp,
                commit=commit,
                jenkins_instance=jenkins_instance)
            #time.sleep(1)
            #print ('individual results', results[pkg_name])

    if wet_only:
        print("wet only selected, skipping dry and delete")
        return results

    default_distros = job_params.distros
    target_arches = list(
        set([x for d in default_distros for x in job_params.arches[d]]))
    rosdistro = job_params.rosdistro
    jobs_graph = job_params.jobgraph

    if rosdistro == 'backports':
        print("No dry backports support")
        return results

    if rosdistro == 'fuerte':
        packages_for_sync = 300
    elif rosdistro == 'groovy':
        packages_for_sync = 500
    elif rosdistro == 'hydro':
        packages_for_sync = 60
    else:
        packages_for_sync = 10000

    #dry stacks
    # dry dependencies
    d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

    for s in sorted(d.stacks.iterkeys()):
        if whitelist_repos and s not in whitelist_repos:
            continue
        print("Configuring DRY job [%s]" % s)
        if not d.stacks[s].version:
            print('- skipping "%s" since version is null' % s)
            continue
        results[debianize_package_name(rd.name, s)] = release_jobs.dry_doit(
            s,
            dry_maintainers[s],
            default_distros,
            target_arches,
            rosdistro,
            jobgraph=jobs_graph,
            commit=commit,
            jenkins_instance=jenkins_instance,
            packages_for_sync=packages_for_sync)
        #time.sleep(1)

    # special metapackages job
    if not whitelist_repos or 'metapackages' in whitelist_repos:
        results[debianize_package_name(
            rd.name, 'metapackages')] = release_jobs.dry_doit(
                'metapackages', [],
                default_distros,
                target_arches,
                rosdistro,
                jobgraph=jobs_graph,
                commit=commit,
                jenkins_instance=jenkins_instance,
                packages_for_sync=packages_for_sync)

    if delete_extra_jobs:
        assert (not whitelist_repos)
        # clean up extra jobs
        configured_jobs = set()

        for jobs in results.values():
            release_jobs.summarize_results(*jobs)
            for e in jobs:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_instance.get_jobs()])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [
            j for j in relevant_jobs
            if rosdistro in j and ('_sourcedeb' in j or '_binarydeb' in j)
        ]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
Example #7
        stack_depends, dry_maintainers = release_jobs.dry_get_stack_dependencies(
            args.rosdistro)
        dry_jobgraph = release_jobs.dry_generate_jobgraph(
            args.rosdistro, dependencies, stack_depends)
    else:
        dry_maintainers = []

    combined_jobgraph = {}
    for k, v in dependencies.iteritems():
        combined_jobgraph[k] = v
    if not args.wet_only:
        for k, v in dry_jobgraph.iteritems():
            combined_jobgraph[k] = v

    # setup a job triggered by all other debjobs
    combined_jobgraph[debianize_package_name(
        args.rosdistro, 'metapackages')] = combined_jobgraph.keys()

    targets = get_targets(rd, args.distros, args.arches)
    jp = JobParams(rosdistro=args.rosdistro,
                   distros=targets.keys(),
                   arches=targets,
                   fqdn=args.fqdn,
                   jobgraph=combined_jobgraph,
                   rosdist_rep=args.rosdist_rep,
                   rd_object=rd)

    results_map = doit(job_params=jp,
                       packages=packages,
                       dry_maintainers=dry_maintainers,
                       commit=args.commit,
                       wet_only=args.wet_only,