Example 1
def trigger_hudson_build_debs(name, distro_name, os_platform):
    from buildfarm import jenkins_support
    import jenkins
    import urllib
    import urllib2
    jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
        jenkins_support.load_server_config_file(
            jenkins_support.get_default_catkin_debs_config()))
    parameters = {
        'DISTRO_NAME': distro_name,
        'STACK_NAME': name,
        'OS_PLATFORM': os_platform,
    }
    for arch in ['i386', 'amd64']:
        parameters['ARCH'] = arch
        job_name = 'ros-%s-%s_binarydeb_%s_%s' % (
            distro_name, name.replace('_', '-'), os_platform, arch)
        print('triggering job: %s' % job_name)
        if not jenkins_instance.job_exists(job_name):
            raise jenkins.JenkinsException('no such job[%s]' % (job_name))
        # pass parameters to create a POST request instead of GET
        jenkins_instance.jenkins_open(
            urllib2.Request(jenkins_instance.build_job_url(job_name),
                            urllib.urlencode(parameters)))
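Example 1 builds the POST request by hand through urllib2 so that the form-encoded parameters travel in the request body. If the handle returned by JenkinsConfig_to_handle is a python-jenkins Jenkins instance (which the job_exists and jenkins_open calls suggest), the library's own build_job method issues the same parameterized trigger; a minimal sketch under that assumption (the trigger_build helper name is hypothetical):

import jenkins

def trigger_build(jenkins_instance, job_name, parameters):
    # Sketch only: python-jenkins sends the POST to buildWithParameters itself.
    if not jenkins_instance.job_exists(job_name):
        raise jenkins.JenkinsException('no such job[%s]' % job_name)
    jenkins_instance.build_job(job_name, parameters)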
Example 2
                                'resources', 'static_jobs')

    templates = []
    for entry in sorted(os.listdir(template_dir)):
        if not entry.endswith('.xml'):
            continue
        templates.append(entry[:-4])

    jobs = []
    if args.jobs:
        for job in args.jobs:
            if job not in templates:
                print("Unknown job '%s'" % job, file=sys.stderr)
            else:
                jobs.append(job)
    else:
        for template in templates:
            jobs.append(template)

    jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
        jenkins_support.load_server_config_file(
            jenkins_support.get_default_catkin_debs_config()))
    jenkins_jobs = jenkins_instance.get_jobs()

    for job in jobs:
        template_filename = os.path.join(template_dir, job + '.xml')
        with open(template_filename, 'r') as f:
            config = f.read()
            create_jenkins_job(jenkins_instance, jenkins_jobs, job, config,
                               args.commit)
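create_jenkins_job is a buildfarm helper whose implementation is not shown in this snippet. A minimal sketch of the create-or-reconfigure pattern it presumably wraps, assuming the handle is a python-jenkins Jenkins instance (the create_or_update_job name below is hypothetical, not the buildfarm API):

def create_or_update_job(jenkins_instance, job_name, config_xml):
    # Push the template XML: create the job on first run,
    # reconfigure it on subsequent runs.
    if jenkins_instance.job_exists(job_name):
        jenkins_instance.reconfig_job(job_name, config_xml)
    else:
        jenkins_instance.create_job(job_name, config_xml)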
Example 3
def doit(rd,
         distros,
         arches,
         apt_target_repository,
         fqdn,
         jobs_graph,
         rosdistro,
         packages,
         dry_maintainers,
         commit=False,
         delete_extra_jobs=False,
         whitelist_repos=None,
         sourcedeb_timeout=None,
         binarydeb_timeout=None,
         ssh_key_id=None):
    jenkins_instance = None
    jenkins_jobs = []
    if args.commit or delete_extra_jobs:
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
            jenkins_support.load_server_config_file(
                jenkins_support.get_default_catkin_debs_config()))
        try:
            jenkins_jobs = jenkins_instance.get_jobs()
        except urllib2.URLError as e:
            raise urllib2.URLError(str(e) + ' (%s)' % jenkins_instance.server)

    # Figure out default distros.  Command-line arg takes precedence; if
    # it's not specified, then read targets.yaml.
    if distros:
        default_distros = distros
    else:
        default_distros = rd.get_target_distros()

    # TODO: pull arches from rosdistro
    target_arches = arches

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}

    for repo_name in sorted(rd.get_repo_list()):
        if whitelist_repos and repo_name not in whitelist_repos:
            continue

        r = rd.get_repo(repo_name)
        #todo add support for specific targets, needed in rosdistro.py too
        #if 'target' not in r or r['target'] == 'all':
        target_distros = default_distros
        #else:
        #    target_distros = list(set(r['target']) & set(default_distros))

        print('Configuring WET repo "%s" at "%s" for "%s"' %
              (r.name, r.url, target_distros))

        for p in sorted(r.packages.iterkeys()):
            if not r.version:
                print('- skipping "%s" since version is null' % p)
                continue
            pkg_name = rd.debianize_package_name(p)
            results[pkg_name] = release_jobs.doit(
                r.url,
                pkg_name,
                packages[p],
                target_distros,
                target_arches,
                apt_target_repository,
                fqdn,
                jobs_graph,
                rosdistro=rosdistro,
                short_package_name=p,
                commit=commit,
                jenkins_instance=jenkins_instance,
                jenkins_jobs=jenkins_jobs,
                sourcedeb_timeout=sourcedeb_timeout,
                binarydeb_timeout=binarydeb_timeout,
                ssh_key_id=ssh_key_id)
            #time.sleep(1)
            #print ('individual results', results[pkg_name])

    if args.wet_only:
        print("wet only selected, skipping dry and delete")
        return results

    if rosdistro == 'backports':
        print("No dry backports support")
        return results

    if rosdistro == 'fuerte':
        packages_for_sync = 300
    elif rosdistro == 'groovy':
        packages_for_sync = 820
    elif rosdistro == 'hydro':
        packages_for_sync = 1500
    elif rosdistro == 'indigo':
        packages_for_sync = 1300
    elif rosdistro == 'jade':
        packages_for_sync = 700
    else:
        packages_for_sync = 10000

    if rosdistro == 'groovy':
        #dry stacks
        # dry dependencies
        d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

        for s in sorted(d.stacks.iterkeys()):
            if whitelist_repos and s not in whitelist_repos:
                continue
            print("Configuring DRY job [%s]" % s)
            if not d.stacks[s].version:
                print('- skipping "%s" since version is null' % s)
                continue
            results[rd.debianize_package_name(s)] = release_jobs.dry_doit(
                s,
                dry_maintainers[s],
                default_distros,
                target_arches,
                fqdn,
                rosdistro,
                jobgraph=jobs_graph,
                commit=commit,
                jenkins_instance=jenkins_instance,
                jenkins_jobs=jenkins_jobs,
                packages_for_sync=packages_for_sync,
                ssh_key_id=ssh_key_id)
            #time.sleep(1)

    # special metapackages job
    if not whitelist_repos or 'metapackages' in whitelist_repos:
        results[rd.debianize_package_name(
            'metapackages')] = release_jobs.dry_doit(
                'metapackages', [],
                default_distros,
                target_arches,
                fqdn,
                rosdistro,
                jobgraph=jobs_graph,
                commit=commit,
                jenkins_instance=jenkins_instance,
                jenkins_jobs=jenkins_jobs,
                packages_for_sync=packages_for_sync,
                ssh_key_id=ssh_key_id)

    if not whitelist_repos or 'sync' in whitelist_repos:
        results[rd.debianize_package_name('sync')] = release_jobs.dry_doit(
            'sync', [],
            default_distros,
            target_arches,
            fqdn,
            rosdistro,
            jobgraph=jobs_graph,
            commit=commit,
            jenkins_instance=jenkins_instance,
            jenkins_jobs=jenkins_jobs,
            packages_for_sync=packages_for_sync,
            ssh_key_id=ssh_key_id)

    if delete_extra_jobs:
        assert (not whitelist_repos)
        # clean up extra jobs
        configured_jobs = set()

        for jobs in results.values():
            release_jobs.summarize_results(*jobs)
            for e in jobs:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_jobs])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [
            j for j in relevant_jobs
            if rosdistro in j and ('_sourcedeb' in j or '_binarydeb' in j)
        ]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
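The packages_for_sync values in Example 3 are a per-distro configuration expressed as an if/elif chain; the same mapping can be written as a dictionary lookup with a default (a sketch, with the values copied from the chain above and rosdistro being the same argument as in the function):

PACKAGES_FOR_SYNC = {
    'fuerte': 300,
    'groovy': 820,
    'hydro': 1500,
    'indigo': 1300,
    'jade': 700,
}
# Fall back to the permissive default for any other distro.
packages_for_sync = PACKAGES_FOR_SYNC.get(rosdistro, 10000)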
Example 4
def doit(repo_map, package_names_by_url, distros, fqdn, jobs_graph, rosdistro, commit=False, delete_extra_jobs=False):
    jenkins_instance = None
    if args.commit or delete_extra_jobs:
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(jenkins_support.load_server_config_file(jenkins_support.get_default_catkin_debs_config()))

    # What ROS distro are we configuring?
    rosdistro = repo_map['release-name']

    # Figure out default distros.  Command-line arg takes precedence; if
    # it's not specified, then read targets.yaml.
    if distros:
        default_distros = distros
    else:
        print('Fetching "%s"' % (URL_PROTOTYPE % 'targets'))
        targets_map = yaml.load(urllib2.urlopen(URL_PROTOTYPE % 'targets'))
        my_targets = [x for x in targets_map if rosdistro in x]
        if len(my_targets) != 1:
            print('Must have exactly one entry for rosdistro "%s" in targets.yaml' % rosdistro)
            sys.exit(1)
        default_distros = my_targets[0][rosdistro]

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}
    for short_package_name, r in sorted(repo_map['repositories'].items()):
        if 'url' not in r:
            print('"url" key missing for repository "%s"; skipping' % r)
            continue
        url = r['url']
        if url not in package_names_by_url:
            print('Repo "%s" is missing from the list; must have been skipped (e.g., for missing a stack.xml)' % r)
            continue
        if 'target' not in r or r['target'] == 'all':
            target_distros = default_distros
        else:
            target_distros = list(set(r['target']) & set(default_distros))

        print('Configuring WET stack "%s" for "%s"' % (r['url'], target_distros))

        results[package_names_by_url[url]] = release_jobs.doit(url,
             package_names_by_url[url],
             target_distros,
             fqdn,
             jobs_graph,
             rosdistro=rosdistro,
             short_package_name=short_package_name,
             commit=commit,
             jenkins_instance=jenkins_instance)
        time.sleep(1)
        #print ('individual results', results[package_names_by_url[url]])


    if args.wet_only:
        print("wet only selected, skipping dry and delete")
        return results

    if rosdistro == 'backports':
        print("no dry backports; stopping execution")
        return results

    #dry stacks
    # dry dependencies
    d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

    for s in d.stacks:
        print ("Configuring DRY job [%s]" % s)
        results[debianize_package_name(rosdistro, s) ] = release_jobs.dry_doit(s, default_distros, rosdistro, jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance)
        time.sleep(1)

    # special metapackages job
    results[debianize_package_name(rosdistro, 'metapackages') ] = release_jobs.dry_doit('metapackages', default_distros, rosdistro, jobgraph=jobs_graph, commit=commit, jenkins_instance=jenkins_instance)

    if delete_extra_jobs:
        # clean up extra jobs
        configured_jobs = set()

        for _, v in results.iteritems():
            release_jobs.summarize_results(*v)
            for e in v:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_instance.get_jobs()])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [j for j in relevant_jobs if rosdistro in j and ('sourcedeb' in j or 'binarydeb' in j)]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
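Example 4 parses targets.yaml with yaml.load applied directly to the HTTP response. In current PyYAML, yaml.load without an explicit Loader is deprecated and unsafe for untrusted input; yaml.safe_load is the usual replacement. A minimal sketch of the same fetch, assuming URL_PROTOTYPE is the format string used above:

import urllib2
import yaml

def fetch_targets_map(url_prototype):
    # Download targets.yaml and parse it with the safe loader.
    return yaml.safe_load(urllib2.urlopen(url_prototype % 'targets').read())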
Example 5
def doit(repo_map,
         stacks,
         distros,
         fqdn,
         rosdistro,
         commit=False,
         delete_extra_jobs=False):
    jenkins_instance = None
    if args.commit or delete_extra_jobs:
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
            jenkins_support.load_server_config_file(
                jenkins_support.get_default_catkin_debs_config()))

    # What ROS distro are we configuring?
    rosdistro = repo_map['release-name']

    # Figure out default distros.  Command-line arg takes precedence; if
    # it's not specified, then read targets.yaml.
    if distros:
        default_distros = distros
    else:
        print('Fetching "%s"' % (URL_PROTOTYPE % 'targets'))
        targets_map = yaml.load(urllib2.urlopen(URL_PROTOTYPE % 'targets'))
        my_targets = [x for x in targets_map if rosdistro in x]
        if len(my_targets) != 1:
            print(
                'Must have exactly one entry for rosdistro "%s" in targets.yaml'
                % rosdistro)
            sys.exit(1)
        default_distros = my_targets[0][rosdistro]

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}
    for short_package_name, r in repo_map['repositories'].items():
        if 'type' not in r or 'url' not in r:
            print('"type" or "url" key missing for repository "%s"; skipping' %
                  short_package_name)
            continue
        vcs_type = r['type']
        url = r['url']
        version = None
        if vcs_type != 'svn':
            if 'version' not in r:
                print(
                    '"version" key missing for non-SVN repository "%s"; skipping' %
                    short_package_name)
                continue
            version = r['version']
        if 'target' not in r or r['target'] == 'all':
            target_distros = default_distros
        else:
            target_distros = list(set(r['target']) & set(default_distros))

        print('Configuring "%s" for "%s"' % (r['url'], target_distros))

        results[short_package_name] = devel_jobs.doit(
            vcs_type,
            url,
            version,
            short_package_name,
            stacks[short_package_name],
            target_distros,
            fqdn,
            rosdistro=rosdistro,
            short_package_name=short_package_name,
            commit=commit,
            jenkins_instance=jenkins_instance)
        print('individual results', results[short_package_name])

    if delete_extra_jobs:
        # clean up extra jobs
        configured_jobs = set()

        for _, v in results.iteritems():
            devel_jobs.summarize_results(*v)
            for e in v:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_instance.get_jobs()])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [
            j for j in relevant_jobs
            if j.startswith('ros-%s-' % rosdistro) and '_devel_' in j
        ]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
Example 6
def doit(job_params,
         dry_maintainers,
         packages,
         rosdist_rep,
         wet_only=False,
         commit=False,
         delete_extra_jobs=False,
         whitelist_repos=None):

    jenkins_instance = None
    if commit or delete_extra_jobs:
        jenkins_config = jenkins_support.load_server_config_file(
            jenkins_support.get_default_catkin_debs_config())
        jenkins_instance = jenkins_support.JenkinsConfig_to_handle(
            jenkins_config)

    rosdistro = job_params.rosdistro
    rd = job_params.rd

    # We take the intersection of repo-specific targets with default
    # targets.
    results = {}

    for repo_name in sorted(rd.get_repositories()):
        if whitelist_repos and repo_name not in whitelist_repos:
            continue

        r = rd.get_repository(repo_name)

        print('Configuring WET repo "%s" at "%s" for "%s"' %
              (r.name, r.url, job_params.distros))
        p_list = [p.name for p in r.packages]
        for p in sorted(p_list):
            if not r.version:
                print('- skipping "%s" since version is null' % p)
                continue
            pkg_name = debianize_package_name(rosdistro, p)
            maintainers = rd.get_maintainers(p)
            pp = PackageParams(package_name=pkg_name,
                               package=packages[p],
                               release_uri=r.url,
                               short_package_name=p,
                               maintainers=maintainers)

            results[pkg_name] = release_jobs.doit(
                job_params=job_params,
                pkg_params=pp,
                commit=commit,
                jenkins_instance=jenkins_instance)
            #time.sleep(1)
            #print ('individual results', results[pkg_name])

    if wet_only:
        print("wet only selected, skipping dry and delete")
        return results

    default_distros = job_params.distros
    target_arches = list(
        set([x for d in default_distros for x in job_params.arches[d]]))
    rosdistro = job_params.rosdistro
    jobs_graph = job_params.jobgraph

    if rosdistro == 'backports':
        print("No dry backports support")
        return results

    if rosdistro == 'fuerte':
        packages_for_sync = 300
    elif rosdistro == 'groovy':
        packages_for_sync = 500
    elif rosdistro == 'hydro':
        packages_for_sync = 60
    else:
        packages_for_sync = 10000

    #dry stacks
    # dry dependencies
    d = rospkg.distro.load_distro(rospkg.distro.distro_uri(rosdistro))

    for s in sorted(d.stacks.iterkeys()):
        if whitelist_repos and s not in whitelist_repos:
            continue
        print("Configuring DRY job [%s]" % s)
        if not d.stacks[s].version:
            print('- skipping "%s" since version is null' % s)
            continue
        results[debianize_package_name(rd.name, s)] = release_jobs.dry_doit(
            s,
            dry_maintainers[s],
            default_distros,
            target_arches,
            rosdistro,
            jobgraph=jobs_graph,
            commit=commit,
            jenkins_instance=jenkins_instance,
            packages_for_sync=packages_for_sync)
        #time.sleep(1)

    # special metapackages job
    if not whitelist_repos or 'metapackages' in whitelist_repos:
        results[debianize_package_name(
            rd.name, 'metapackages')] = release_jobs.dry_doit(
                'metapackages', [],
                default_distros,
                target_arches,
                rosdistro,
                jobgraph=jobs_graph,
                commit=commit,
                jenkins_instance=jenkins_instance,
                packages_for_sync=packages_for_sync)

    if delete_extra_jobs:
        assert (not whitelist_repos)
        # clean up extra jobs
        configured_jobs = set()

        for jobs in results.values():
            release_jobs.summarize_results(*jobs)
            for e in jobs:
                configured_jobs.update(set(e))

        existing_jobs = set([j['name'] for j in jenkins_instance.get_jobs()])
        relevant_jobs = existing_jobs - configured_jobs
        relevant_jobs = [
            j for j in relevant_jobs
            if rosdistro in j and ('_sourcedeb' in j or '_binarydeb' in j)
        ]

        for j in relevant_jobs:
            print('Job "%s" detected as extra' % j)
            if commit:
                jenkins_instance.delete_job(j)
                print('Deleted job "%s"' % j)

    return results
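Example 6 flattens the per-distro architecture mapping in job_params.arches into a deduplicated list with a nested comprehension. The same step in isolation, with hypothetical sample data:

# Hypothetical per-distro architecture mapping, as in job_params.arches.
arches = {'precise': ['i386', 'amd64'], 'quantal': ['amd64']}
default_distros = ['precise', 'quantal']

# Union of architectures across all target distros, without duplicates.
target_arches = list({a for d in default_distros for a in arches[d]})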