def doit(rd, distros, arches, yum_target_repository, fqdn, jobs_graph, rosdistro, packages, dry_maintainers, commit=False, delete_extra_jobs=False, whitelist_repos=None):
    jenkins_instance = None
    if commit or delete_extra_jobs:
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
            jenkins_support.load_server_config_file(
                jenkins_support.get_default_catkin_rpms_config()))

    # Figure out default distros.  Command-line arg takes precedence; if
    # it's not specified, then read targets.yaml.
    if distros:
        default_distros = distros
    else:
        default_distros = rd.get_target_distros()

    # TODO: pull arches from rosdistro
    target_arches = arches

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}

    for repo_name in sorted(rd.get_repo_list()):
        if whitelist_repos and repo_name not in whitelist_repos:
            continue

        r = rd.get_repo(repo_name)
        #todo add support for specific targets, needed in rosdistro.py too
        #if 'target' not in r or r['target'] == 'all':
        target_distros = default_distros
        #else:
        #    target_distros = list(set(r['target']) & set(default_distros))

        print('Configuring WET repo "%s" at "%s" for "%s"' % (r.name, r.url, target_distros))

        for p in sorted(r.packages.iterkeys()):
            if not r.version:
                print('- skipping "%s" since version is null' % p)
                continue
            pkg_name = rd.rpmify_package_name(p)
            results[pkg_name] = release_jobs.doit(r.url,
                 pkg_name,
                 packages[p],
                 target_distros,
                 target_arches,
                 yum_target_repository,
                 fqdn,
                 jobs_graph,
                 rosdistro=rosdistro,
                 short_package_name=p,
                 commit=commit,
                 jenkins_instance=jenkins_instance)
            #time.sleep(1)
            #print ('individual results', results[pkg_name])

    if args.wet_only:
        print ("wet only selected, skipping dry and delete")
        return results

    if rosdistro == 'backports':
        print ("No dry backports support")
        return results

    if rosdistro == 'groovy':
        packages_for_sync = 500
    elif rosdistro == 'hydro':
        packages_for_sync = 60
    else:
        packages_for_sync = 10000

    #dry stacks
    # dry dependencies
    d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

    for s in sorted(d.stacks.iterkeys()):
        if whitelist_repos and s not in whitelist_repos:
            continue
        print ("Configuring DRY job [%s]" % s)
        if not d.stacks[s].version:
            print('- skipping "%s" since version is null' % s)
            continue
        results[rd.rpmify_package_name(s)] = release_jobs.dry_doit(
            s, dry_maintainers[s], default_distros, target_arches, fqdn, rosdistro,
            jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance,
            packages_for_sync=packages_for_sync)
        #time.sleep(1)

    # special metapackages job
    if not whitelist_repos or 'metapackages' in whitelist_repos:
        results[rd.rpmify_package_name('metapackages')] = release_jobs.dry_doit(
            'metapackages', [], default_distros, target_arches, fqdn, rosdistro,
            jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance,
            packages_for_sync=packages_for_sync)

    if not whitelist_repos or 'sync' in whitelist_repos:
        results[rd.rpmify_package_name('sync')] = release_jobs.dry_doit(
            'sync', [], default_distros, target_arches, fqdn, rosdistro,
            jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance,
            packages_for_sync=packages_for_sync)

    if delete_extra_jobs:
        assert(not whitelist_repos)
        # clean up extra jobs
        configured_jobs = set()

        for jobs in results.values():
            release_jobs.summarize_results(*jobs)
            for e in jobs:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_instance.get_jobs()])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [j for j in relevant_jobs if rosdistro in j and ('_sourcerpm' in j or '_binaryrpm' in j)]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
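
# A minimal standalone sketch of the cleanup filter used above: take the job names
# Jenkins already has, subtract the ones this run (re)configured, and keep only the
# leftovers that look like sourcerpm/binaryrpm jobs for this rosdistro.  The helper
# name and the job names in the usage note are made up for illustration.
def find_extra_rpm_jobs(existing_jobs, configured_jobs, rosdistro):
    leftovers = set(existing_jobs) - set(configured_jobs)
    return sorted(j for j in leftovers
                  if rosdistro in j and ('_sourcerpm' in j or '_binaryrpm' in j))

# find_extra_rpm_jobs(['ros-hydro-foo_binaryrpm_fc20_x86_64',
#                      'ros-hydro-bar_binaryrpm_fc20_x86_64'],
#                     ['ros-hydro-foo_binaryrpm_fc20_x86_64'],
#                     'hydro')
# -> ['ros-hydro-bar_binaryrpm_fc20_x86_64']
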
# Example 2
def doit(job_params, dry_maintainers, packages, rosdist_rep,
         wet_only=False, commit = False, delete_extra_jobs = False, whitelist_repos = None):

    jenkins_instance = None
    if commit or delete_extra_jobs:
        jenkins_config = jenkins_support.load_server_config_file(jenkins_support.get_default_catkin_debs_config())
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(jenkins_config)

    rosdistro = job_params.rosdistro
    rd = job_params.rd

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}

    for repo_name in sorted(rd.get_repositories()):
        if whitelist_repos and repo_name not in whitelist_repos:
            continue

        r = rd.get_repository(repo_name)

        print('Configuring WET repo "%s" at "%s" for "%s"' % (r.name, r.url, job_params.distros))
        p_list = [p.name for p in r.packages]
        for p in sorted(p_list):
            if not r.version:
                print('- skipping "%s" since version is null' % p)
                continue
            pkg_name = debianize_package_name(rosdistro, p)
            maintainers = rd.get_maintainers(p)
            pp = PackageParams(package_name=pkg_name,
                               package=packages[p],
                               release_uri=r.url,
                               short_package_name=p,
                               maintainers=maintainers)

            results[pkg_name] = release_jobs.doit(job_params=job_params,
                                                  pkg_params=pp,
                                                  commit=commit,
                                                  jenkins_instance=jenkins_instance)
            #time.sleep(1)
            #print ('individual results', results[pkg_name])

    if wet_only:
        print ("wet only selected, skipping dry and delete")
        return results

    default_distros = job_params.distros
    target_arches = list(set([x for d in default_distros for x in job_params.arches[d]]))
    rosdistro = job_params.rosdistro
    jobs_graph = job_params.jobgraph

    if rosdistro == 'backports':
        print ("No dry backports support")
        return results

    if rosdistro == 'fuerte':
        packages_for_sync = 300
    elif rosdistro == 'groovy':
        packages_for_sync = 500
    elif rosdistro == 'hydro':
        packages_for_sync = 60
    else:
        packages_for_sync = 10000

    #dry stacks
    # dry dependencies
    d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

    for s in sorted(d.stacks.iterkeys()):
        if whitelist_repos and s not in whitelist_repos:
            continue
        print ("Configuring DRY job [%s]" % s)
        if not d.stacks[s].version:
            print('- skipping "%s" since version is null' % s)
            continue
        results[debianize_package_name(rd.name, s)] = release_jobs.dry_doit(
            s, dry_maintainers[s], default_distros, target_arches, rosdistro,
            jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance,
            packages_for_sync=packages_for_sync)
        #time.sleep(1)

    # special metapackages job
    if not whitelist_repos or 'metapackages' in whitelist_repos:
        results[debianize_package_name(rd.name, 'metapackages')] = release_jobs.dry_doit(
            'metapackages', [], default_distros, target_arches, rosdistro,
            jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance,
            packages_for_sync=packages_for_sync)

    if delete_extra_jobs:
        assert(not whitelist_repos)
        # clean up extra jobs
        configured_jobs = set()

        for jobs in results.values():
            release_jobs.summarize_results(*jobs)
            for e in jobs:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_instance.get_jobs()])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [j for j in relevant_jobs if rosdistro in j and ('_sourcedeb' in j or '_binarydeb' in j)]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
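
# The target_arches line above flattens the per-distro architecture lists into one
# deduplicated list.  A tiny sketch of the same comprehension with made-up data
# (real values come from job_params.arches and depend on the farm configuration):
example_arches_by_distro = {'precise': ['amd64', 'i386'], 'quantal': ['amd64']}
example_distros = ['precise', 'quantal']
target_arches = list(set([a for d in example_distros
                          for a in example_arches_by_distro[d]]))
# -> ['amd64', 'i386'] in some order (sets are unordered)
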
# Example 3
def doit(rd, distros, arches, target_repository, fqdn, jobs_graph, rosdistro, packages, dry_maintainers, commit=False, delete_extra_jobs=False, whitelist_repos=None, sourcepkg_timeout=None, binarypkg_timeout=None, ssh_key_id=None, platform='ubuntu'):
    jenkins_instance = None
    if commit or delete_extra_jobs:
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
            jenkins_support.load_server_config_file(
                jenkins_support.get_default_catkin_debs_config()))

    # Figure out default distros.  Command-line arg takes precedence; if
    # it's not specified, then read targets.yaml.
    if distros:
        default_distros = distros
    else:
        default_distros = rd.get_target_distros()[platform]

    # TODO: pull arches from rosdistro
    target_arches = arches

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}

    for repo_name in sorted(rd.get_repo_list()):
        if whitelist_repos and repo_name not in whitelist_repos:
            continue

        r = rd.get_repo(repo_name)
        #todo add support for specific targets, needed in rosdistro.py too
        #if 'target' not in r or r['target'] == 'all':
        target_distros = default_distros
        #else:
        #    target_distros = list(set(r['target']) & set(default_distros))

        print('Configuring WET repo "%s" at "%s" for "%s"' % (r.name, r.url, target_distros))

        # TODO: Workaround until repos have rpm branches
        manual_workarounds = []
        if rosdistro == 'jade':
            manual_workarounds += ['bfl'] # https://github.com/ros-gbp/bfl-release/pull/9
            manual_workarounds += ['robot_upstart'] # missing daemontools
            # manual_workarounds += ['ueye_cam'] # https://github.com/anqixu/ueye_cam/pull/23
        elif rosdistro == 'indigo':
            pass
            # manual_workarounds += ['ardrone_autonomy'] # https://github.com/AutonomyLab/ardronelib/pull/1
            # manual_workarounds += ['bride'] # Missing build ids
            manual_workarounds += ['care_o_bot'] # https://github.com/ipa320/care-o-bot/issues/5
            # manual_workarounds += ['euslisp'] # https://github.com/tork-a/euslisp-release/pull/4
            # manual_workarounds += ['graft'] # https://github.com/ros-perception/graft/issues/23
            # manual_workarounds += ['hrpsys'] # https://bugzilla.redhat.com/1207045
            # manual_workarounds += ['joystick_drivers'] # https://github.com/ros-drivers/joystick_drivers/pull/66
            manual_workarounds += ['libnabo'] # -DSHARED_LIBS:BOOL=ON (no official rpm branch yet)
            # manual_workarounds += ['libpointmatcher'] # TODO: Not sure how to phrase this one yet
            # manual_workarounds += ['librms'] # https://github.com/ros/rosdistro/pull/6619
            # manual_workarounds += ['neo_driver'] # https://github.com/neobotix/neo_driver/pull/3
            # manual_workarounds += ['ocl'] # https://github.com/ros/rosdistro/pull/6959
            manual_workarounds += ['openni_camera'] # https://github.com/ros-drivers/openni_camera/pull/32
            manual_workarounds += ['openni2_camera'] # valid branch has wrong rosdep entry for openni2-devel
            manual_workarounds += ['razer_hydra'] # udev rules...
            manual_workarounds += ['robot_upstart'] # missing daemontools
            # manual_workarounds += ['srv_tools'] # https://github.com/srv/srv_tools/pull/3
            # manual_workarounds += ['stage'] # https://github.com/ros-simulation/stage_ros/issues/14
            # manual_workarounds += ['stage_ros'] # https://github.com/ros-simulation/stage_ros/issues/14
            # manual_workarounds += ['uwsim_bullet'] # https://github.com/uji-ros-pkg/uwsim_bullet/pull/1
            # manual_workarounds += ['warehouse_ros'] # https://github.com/ros-planning/warehouse_ros/pull/17

        import re
        expected_tags = ['rpm/%s-%s_%s' % (rd.debianize_package_name(r.packages.keys()[0]),
                                           r.full_version, target_distro)
                         for target_distro in target_distros]
        if r.name in manual_workarounds or None in [verify_tags(r.url, expected_tag)
                                                    for expected_tag in expected_tags]:
            re_url = re.match(r'(http|https|git|ssh)://(git@)?github\.com[:/]([^/]*)/(.*)', r.url)
            if not re_url:
                print('- failed to parse URL: %s' % r.url)
                continue
            temporary_url = '://github.com/smd-ros-rpm-release/%s' % re_url.group(4)
            expected_branch = 'rpm/' + rosdistro + '/*'
            if verify_heads('git' + temporary_url, expected_branch):
                r.url = 'https' + temporary_url
                print('- using workaround URL since no RPM branch exists: %s' % r.url)
            else:
                print('- skipping all of "%s" since no RPM branch or workaround repo exist' % r.name)
                continue
        # End workaround

        for p in sorted(r.packages.iterkeys()):
            if not r.version:
                print('- skipping "%s" since version is null' % p)
                continue
            pkg_name = rd.debianize_package_name(p)
            results[pkg_name] = release_jobs.doit(r.url,
                                                  pkg_name,
                                                  packages[p],
                                                  target_distros,
                                                  target_arches,
                                                  target_repository,
                                                  fqdn,
                                                  jobs_graph,
                                                  rosdistro=rosdistro,
                                                  short_package_name=p,
                                                  commit=commit,
                                                  jenkins_instance=jenkins_instance,
                                                  sourcepkg_timeout=sourcepkg_timeout,
                                                  binarypkg_timeout=binarypkg_timeout,
                                                  ssh_key_id=ssh_key_id,
                                                  platform=platform)
            #time.sleep(1)
            #print ('individual results', results[pkg_name])

    if args.wet_only:
        print("wet only selected, skipping dry and delete")
        return results

    if rosdistro == 'backports' or platform == 'fedora':
        print("No dry backports support")
        return results

    if rosdistro == 'fuerte':
        packages_for_sync = 300
    elif rosdistro == 'groovy':
        packages_for_sync = 740
    elif rosdistro == 'hydro':
        packages_for_sync = 865
    elif rosdistro == 'indigo':
        packages_for_sync = 1
    else:
        packages_for_sync = 10000

    if rosdistro == 'groovy':
        #dry stacks
        # dry dependencies
        d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

        for s in sorted(d.stacks.iterkeys()):
            if whitelist_repos and s not in whitelist_repos:
                continue
            print("Configuring DRY job [%s]" % s)
            if not d.stacks[s].version:
                print('- skipping "%s" since version is null' % s)
                continue
            results[rd.debianize_package_name(s)] = release_jobs.dry_doit(
                s, dry_maintainers[s], default_distros, target_arches, fqdn, rosdistro,
                jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance,
                packages_for_sync=packages_for_sync, ssh_key_id=ssh_key_id)
            #time.sleep(1)

    # special metapackages job
    if not whitelist_repos or 'metapackages' in whitelist_repos:
        results[rd.debianize_package_name('metapackages')] = release_jobs.dry_doit(
            'metapackages', [], default_distros, target_arches, fqdn, rosdistro,
            jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance,
            packages_for_sync=packages_for_sync, ssh_key_id=ssh_key_id)

    if not whitelist_repos or 'sync' in whitelist_repos:
        results[rd.debianize_package_name('sync')] = release_jobs.dry_doit(
            'sync', [], default_distros, target_arches, fqdn, rosdistro,
            jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance,
            packages_for_sync=packages_for_sync, ssh_key_id=ssh_key_id)

    if delete_extra_jobs:
        assert(not whitelist_repos)
        # clean up extra jobs
        configured_jobs = set()

        for jobs in results.values():
            release_jobs.summarize_results(*jobs)
            for e in jobs:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_instance.get_jobs()])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [j for j in relevant_jobs if rosdistro in j and ('_sourcedeb' in j or '_binarydeb' in j)]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
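
# Sketch of the RPM-branch workaround above in isolation: parse a GitHub release
# repository URL and point it at the fallback organisation that is assumed to carry
# rpm/* branches.  The regex and the 'smd-ros-rpm-release' organisation are taken
# from the example above; the helper name and the sample URL are illustrative only.
import re

def workaround_release_url(url, org='smd-ros-rpm-release'):
    m = re.match(r'(http|https|git|ssh)://(git@)?github\.com[:/]([^/]*)/(.*)', url)
    if not m:
        return None  # not a recognisable GitHub URL
    return 'https://github.com/%s/%s' % (org, m.group(4))

# workaround_release_url('https://github.com/ros-gbp/bfl-release.git')
# -> 'https://github.com/smd-ros-rpm-release/bfl-release.git'
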
# Example 4
def doit(rd,
         distros,
         arches,
         apt_target_repository,
         fqdn,
         jobs_graph,
         rosdistro,
         packages,
         dry_maintainers,
         commit=False,
         delete_extra_jobs=False,
         whitelist_repos=None,
         sourcedeb_timeout=None,
         binarydeb_timeout=None,
         ssh_key_id=None):
    jenkins_instance = None
    jenkins_jobs = []
    if commit or delete_extra_jobs:
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
            jenkins_support.load_server_config_file(
                jenkins_support.get_default_catkin_debs_config()))
        try:
            jenkins_jobs = jenkins_instance.get_jobs()
        except urllib2.URLError as e:
            raise urllib2.URLError(str(e) + ' (%s)' % jenkins_instance.server)

    # Figure out default distros.  Command-line arg takes precedence; if
    # it's not specified, then read targets.yaml.
    if distros:
        default_distros = distros
    else:
        default_distros = rd.get_target_distros()

    # TODO: pull arches from rosdistro
    target_arches = arches

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}

    for repo_name in sorted(rd.get_repo_list()):
        if whitelist_repos and repo_name not in whitelist_repos:
            continue

        r = rd.get_repo(repo_name)
        #todo add support for specific targets, needed in rosdistro.py too
        #if 'target' not in r or r['target'] == 'all':
        target_distros = default_distros
        #else:
        #    target_distros = list(set(r['target']) & set(default_distros))

        print('Configuring WET repo "%s" at "%s" for "%s"' %
              (r.name, r.url, target_distros))

        for p in sorted(r.packages.iterkeys()):
            if not r.version:
                print('- skipping "%s" since version is null' % p)
                continue
            pkg_name = rd.debianize_package_name(p)
            results[pkg_name] = release_jobs.doit(
                r.url,
                pkg_name,
                packages[p],
                target_distros,
                target_arches,
                apt_target_repository,
                fqdn,
                jobs_graph,
                rosdistro=rosdistro,
                short_package_name=p,
                commit=commit,
                jenkins_instance=jenkins_instance,
                jenkins_jobs=jenkins_jobs,
                sourcedeb_timeout=sourcedeb_timeout,
                binarydeb_timeout=binarydeb_timeout,
                ssh_key_id=ssh_key_id)
            #time.sleep(1)
            #print ('individual results', results[pkg_name])

    if args.wet_only:
        print("wet only selected, skipping dry and delete")
        return results

    if rosdistro == 'backports':
        print("No dry backports support")
        return results

    if rosdistro == 'fuerte':
        packages_for_sync = 300
    elif rosdistro == 'groovy':
        packages_for_sync = 820
    elif rosdistro == 'hydro':
        packages_for_sync = 1500
    elif rosdistro == 'indigo':
        packages_for_sync = 1300
    elif rosdistro == 'jade':
        packages_for_sync = 700
    else:
        packages_for_sync = 10000

    if rosdistro == 'groovy':
        #dry stacks
        # dry dependencies
        d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

        for s in sorted(d.stacks.iterkeys()):
            if whitelist_repos and s not in whitelist_repos:
                continue
            print("Configuring DRY job [%s]" % s)
            if not d.stacks[s].version:
                print('- skipping "%s" since version is null' % s)
                continue
            results[rd.debianize_package_name(s)] = release_jobs.dry_doit(
                s,
                dry_maintainers[s],
                default_distros,
                target_arches,
                fqdn,
                rosdistro,
                jobgraph=jobs_graph,
                commit=commit,
                jenkins_instance=jenkins_instance,
                jenkins_jobs=jenkins_jobs,
                packages_for_sync=packages_for_sync,
                ssh_key_id=ssh_key_id)
            #time.sleep(1)

    # special metapackages job
    if not whitelist_repos or 'metapackages' in whitelist_repos:
        results[rd.debianize_package_name(
            'metapackages')] = release_jobs.dry_doit(
                'metapackages', [],
                default_distros,
                target_arches,
                fqdn,
                rosdistro,
                jobgraph=jobs_graph,
                commit=commit,
                jenkins_instance=jenkins_instance,
                jenkins_jobs=jenkins_jobs,
                packages_for_sync=packages_for_sync,
                ssh_key_id=ssh_key_id)

    if not whitelist_repos or 'sync' in whitelist_repos:
        results[rd.debianize_package_name('sync')] = release_jobs.dry_doit(
            'sync', [],
            default_distros,
            target_arches,
            fqdn,
            rosdistro,
            jobgraph=jobs_graph,
            commit=commit,
            jenkins_instance=jenkins_instance,
            jenkins_jobs=jenkins_jobs,
            packages_for_sync=packages_for_sync,
            ssh_key_id=ssh_key_id)

    if delete_extra_jobs:
        assert (not whitelist_repos)
        # clean up extra jobs
        configured_jobs = set()

        for jobs in results.values():
            release_jobs.summarize_results(*jobs)
            for e in jobs:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_jobs])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [
            j for j in relevant_jobs
            if rosdistro in j and ('_sourcedeb' in j or '_binarydeb' in j)
        ]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
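
# The packages_for_sync ladder above is just a per-rosdistro threshold with a large
# default.  A compact equivalent of the values used in this variant (the constant
# and helper names are made up):
PACKAGES_FOR_SYNC = {'fuerte': 300, 'groovy': 820, 'hydro': 1500,
                     'indigo': 1300, 'jade': 700}

def packages_for_sync_for(rosdistro):
    return PACKAGES_FOR_SYNC.get(rosdistro, 10000)

# packages_for_sync_for('hydro') -> 1500; any unlisted rosdistro -> 10000

# Example 5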
def doit(repo_map, package_names_by_url, distros, fqdn, jobs_graph, rosdistro, commit=False, delete_extra_jobs=False):
    jenkins_instance = None
    if commit or delete_extra_jobs:
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
            jenkins_support.load_server_config_file(
                jenkins_support.get_default_catkin_debs_config()))

    # What ROS distro are we configuring?
    rosdistro = repo_map['release-name']

    # Figure out default distros.  Command-line arg takes precedence; if
    # it's not specified, then read targets.yaml.
    if distros:
        default_distros = distros
    else:
        print('Fetching "%s"' % (URL_PROTOTYPE % 'targets'))
        targets_map = yaml.load(urllib2.urlopen(URL_PROTOTYPE % 'targets'))
        my_targets = [x for x in targets_map if rosdistro in x]
        if len(my_targets) != 1:
            print('Must have exactly one entry for rosdistro "%s" in targets.yaml' % rosdistro)
            sys.exit(1)
        default_distros = my_targets[0][rosdistro]

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}
    for short_package_name, r in sorted(repo_map['repositories'].items()):
        if 'url' not in r:
            print('"url" key missing for repository "%s"; skipping' % r)
            continue
        url = r['url']
        if url not in package_names_by_url:
            print('Repo "%s" is missing from the list; must have been skipped (e.g., for missing a stack.xml)' % r)
            continue
        if 'target' not in r or r['target'] == 'all':
            target_distros = default_distros
        else:
            target_distros = list(set(r['target']) & set(default_distros))

        print('Configuring WET stack "%s" for "%s"' % (r['url'], target_distros))

        results[package_names_by_url[url]] = release_jobs.doit(url,
             package_names_by_url[url],
             target_distros,
             fqdn,
             jobs_graph,
             rosdistro=rosdistro,
             short_package_name=short_package_name,
             commit=commit,
             jenkins_instance=jenkins_instance)
        time.sleep(1)
        #print ('individual results', results[package_names_by_url[url]])


    if args.wet_only:
        print ("wet only selected, skipping dry and delete")
        return results

    if rosdistro == 'backports':
        print ("no dry backports stopping execution")
        return results

    #dry stacks
    # dry dependencies
    d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

    for s in d.stacks:
        print ("Configuring DRY job [%s]" % s)
        results[debianize_package_name(rosdistro, s)] = release_jobs.dry_doit(
            s, default_distros, rosdistro, jobgraph=jobs_graph, commit=commit,
            jenkins_instance=jenkins_instance)
        time.sleep(1)

    # special metapackages job
    results[debianize_package_name(rosdistro, 'metapackages')] = release_jobs.dry_doit(
        'metapackages', default_distros, rosdistro, jobgraph=jobs_graph, commit=commit,
        jenkins_instance=jenkins_instance)

    if delete_extra_jobs:
        # clean up extra jobs
        configured_jobs = set()

        for _, v in results.iteritems():
            release_jobs.summarize_results(*v)
            for e in v:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_instance.get_jobs()])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [j for j in relevant_jobs if rosdistro in j and ('sourcedeb' in j or 'binarydeb' in j)]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
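
# The targets.yaml handling above expects a list of single-key mappings, one per
# rosdistro, each mapping the distro name to its target platform list.  A made-up
# illustration of the shape the lookup relies on:
example_targets_map = [{'fuerte': ['lucid', 'oneiric', 'precise']},
                       {'groovy': ['precise', 'quantal']}]
my_targets = [x for x in example_targets_map if 'groovy' in x]
default_distros = my_targets[0]['groovy']   # -> ['precise', 'quantal']
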
# Example 6
def doit(job_params,
         dry_maintainers,
         packages,
         rosdist_rep,
         wet_only=False,
         commit=False,
         delete_extra_jobs=False,
         whitelist_repos=None):

    jenkins_instance = None
    if commit or delete_extra_jobs:
        jenkins_config = jenkins_support.load_server_config_file(
            jenkins_support.get_default_catkin_debs_config())
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
            jenkins_config)

    rosdistro = job_params.rosdistro
    rd = job_params.rd

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}

    for repo_name in sorted(rd.get_repositories()):
        if whitelist_repos and repo_name not in whitelist_repos:
            continue

        r = rd.get_repository(repo_name)

        print('Configuring WET repo "%s" at "%s" for "%s"' %
              (r.name, r.url, job_params.distros))
        p_list = [p.name for p in r.packages]
        for p in sorted(p_list):
            if not r.version:
                print('- skipping "%s" since version is null' % p)
                continue
            pkg_name = debianize_package_name(rosdistro, p)
            maintainers = rd.get_maintainers(p)
            pp = PackageParams(package_name=pkg_name,
                               package=packages[p],
                               release_uri=r.url,
                               short_package_name=p,
                               maintainers=maintainers)

            results[pkg_name] = release_jobs.doit(
                job_params=job_params,
                pkg_params=pp,
                commit=commit,
                jenkins_instance=jenkins_instance)
            #time.sleep(1)
            #print ('individual results', results[pkg_name])

    if wet_only:
        print("wet only selected, skipping dry and delete")
        return results

    default_distros = job_params.distros
    target_arches = list(
        set([x for d in default_distros for x in job_params.arches[d]]))
    rosdistro = job_params.rosdistro
    jobs_graph = job_params.jobgraph

    if rosdistro == 'backports':
        print("No dry backports support")
        return results

    if rosdistro == 'fuerte':
        packages_for_sync = 300
    elif rosdistro == 'groovy':
        packages_for_sync = 500
    elif rosdistro == 'hydro':
        packages_for_sync = 60
    else:
        packages_for_sync = 10000

    #dry stacks
    # dry dependencies
    d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

    for s in sorted(d.stacks.iterkeys()):
        if whitelist_repos and s not in whitelist_repos:
            continue
        print("Configuring DRY job [%s]" % s)
        if not d.stacks[s].version:
            print('- skipping "%s" since version is null' % s)
            continue
        results[debianize_package_name(rd.name, s)] = release_jobs.dry_doit(
            s,
            dry_maintainers[s],
            default_distros,
            target_arches,
            rosdistro,
            jobgraph=jobs_graph,
            commit=commit,
            jenkins_instance=jenkins_instance,
            packages_for_sync=packages_for_sync)
        #time.sleep(1)

    # special metapackages job
    if not whitelist_repos or 'metapackages' in whitelist_repos:
        results[debianize_package_name(
            rd.name, 'metapackages')] = release_jobs.dry_doit(
                'metapackages', [],
                default_distros,
                target_arches,
                rosdistro,
                jobgraph=jobs_graph,
                commit=commit,
                jenkins_instance=jenkins_instance,
                packages_for_sync=packages_for_sync)

    if delete_extra_jobs:
        assert (not whitelist_repos)
        # clean up extra jobs
        configured_jobs = set()

        for jobs in results.values():
            release_jobs.summarize_results(*jobs)
            for e in jobs:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_instance.get_jobs()])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [
            j for j in relevant_jobs
            if rosdistro in j and ('_sourcedeb' in j or '_binarydeb' in j)
        ]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
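
# debianize_package_name(rosdistro, name) is used throughout these examples but not
# shown here.  On the ROS buildfarm the Debian naming convention is
# "ros-<rosdistro>-<name>" with underscores replaced by dashes; a minimal sketch of
# that convention (an assumption about the helper, not its actual source):
def debianize_package_name(rosdistro, package_name):
    return 'ros-%s-%s' % (rosdistro, package_name.replace('_', '-'))

# debianize_package_name('groovy', 'tf2_ros') -> 'ros-groovy-tf2-ros'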