Example 1
def dry_binarydeb_jobs(stackname, dry_maintainers, rosdistro, distros, arches, fqdn, jobgraph, packages_for_sync):
    jenkins_config = jenkins_support.load_server_config_file(jenkins_support.get_default_catkin_debs_config())
    package = debianize_package_name(rosdistro, stackname)
    d = dict(
        FQDN=fqdn,
        PACKAGE=package,
        ROSDISTRO=rosdistro,
        STACK_NAME=stackname,
        NOTIFICATION_EMAIL=' '.join(dry_maintainers),
        USERNAME=jenkins_config.username,
        IS_METAPACKAGES=(stackname == 'metapackages'),
        PACKAGES_FOR_SYNC=str(packages_for_sync)
    )
    jobs = []
    for distro in distros:
        for arch in arches:
            d['ARCH'] = arch
            d['DISTRO'] = distro

            d["CHILD_PROJECTS"] = calc_child_jobs(package, distro, arch, jobgraph)
            d["DEPENDENTS"] = "True"
            if stackname == 'sync':
                d["CHILD_PROJECTS"] = debianize_package_name(rosdistro, 'metapackage')
                d['DISTROS'] = distros
                d['ARCHES'] = arches
                config = create_sync_binarydeb_config(d)
            else:
                config = create_dry_binarydeb_config(d)
            #print(config)
            job_name = binarydeb_job_name(package, distro, arch)
            jobs.append((job_name, config))
            #print ("config of %s is %s" % (job_name, config))
    return jobs
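A hedged usage sketch for Example 1 (every argument value below is a made-up placeholder, and the call assumes the module's Jenkins config and job-template helpers are importable; a later revision of this function appears as Example 6 below, adding an ssh_key_id parameter):

jobs = dry_binarydeb_jobs(stackname='navigation',
                          dry_maintainers=['dev@example.org'],
                          rosdistro='groovy',
                          distros=['precise'],
                          arches=['amd64', 'i386'],
                          fqdn='build.example.org',
                          jobgraph={},
                          packages_for_sync=[])
for job_name, job_config in jobs:
    print(job_name)  # one Jenkins job per (distro, arch) pair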
Example 2
def get_jenkins_dependencies(rosdistro, packages):
    result = {}
    for pkg_name in sorted(packages.keys()):
        p = packages[pkg_name]
        deb_name = debianize_package_name(rosdistro, p.name)
        build_depends = _get_depends(packages, p, recursive=False, buildtime=True)
        run_depends = _get_depends(packages, p, recursive=False, buildtime=False)

        # Only set first-level dependencies here, to reduce clutter in
        # Jenkins (rather than the recursive set used previously).
        result[deb_name] = [debianize_package_name(rosdistro, d.name) for d in build_depends | run_depends]

    return result
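The _get_depends helper is not shown on this page. For the set union on the result line to work, it must return sets of objects exposing a .name attribute; a minimal, purely hypothetical stand-in could look like:

def _get_depends(packages, p, recursive=False, buildtime=False):
    # Hypothetical stand-in: return the direct build- or run-dependency
    # package objects of p, restricted to packages known in this set.
    depends = p.build_depends if buildtime else p.run_depends
    return set(packages[d.name] for d in depends if d.name in packages)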
Example 3
def dry_generate_jobgraph(rosdistro, wet_jobgraph, stack_depends):
    jobgraph = {}
    for key, val in stack_depends.iteritems():
        dry_depends = [debianize_package_name(rosdistro, p) for p in val]

        untracked_wet_packages = [p for p in dry_depends if p in wet_jobgraph]

        extra_packages = set()
        for p in untracked_wet_packages:
            #print("adding packages for %s - [%s] " % (p, ', '.join(wet_jobgraph[p])) )
            extra_packages.update(wet_jobgraph[p])

        jobgraph[debianize_package_name(rosdistro, key)] = dry_depends + list(extra_packages)
    return jobgraph
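A small worked example (the dictionaries are invented, and it assumes debianize_package_name produces the ros-<rosdistro>- prefix sketched under Example 8 below):

wet_jobgraph = {'ros-groovy-rospy': ['ros-groovy-roscpp']}
stack_depends = {'navigation': ['rospy']}
print(dry_generate_jobgraph('groovy', wet_jobgraph, stack_depends))
# {'ros-groovy-navigation': ['ros-groovy-rospy', 'ros-groovy-roscpp']}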
Example 5
def get_dependencies(rosdistro, stacks):
    packages = {}
    build_dependencies = {}
    runtime_dependencies = {}

    for name in sorted(stacks.keys()):
        stack = stacks[name]

        if stack is None:
            packages[name] = sanitize_package_name(name)
            build_dependencies[name] = []
            runtime_dependencies[name] = []
        else:
            catkin_project_name = stack.name
            packages[catkin_project_name] = debianize_package_name(rosdistro, catkin_project_name)
            build_dependencies[catkin_project_name] = [d.name for d in stack.build_depends]
            runtime_dependencies[catkin_project_name] = [d.name for d in stack.depends]

    result = {}
    # combines direct buildtime- and recursive runtime-dependencies
    for k in packages.keys():
        #print '\nDependencies for: ', k
        build_deps = _get_dependencies(build_dependencies, k, packages)
        # recursive runtime depends of build depends
        recursive_runtime_dependencies = _get_dependencies(runtime_dependencies, k, packages, True)
        #print 'Recursive runtime-dependencies:', ', '.join(recursive_runtime_dependencies)
        result[packages[k]] = build_deps | recursive_runtime_dependencies
        #print 'Combined dependencies:', ', '.join(result[packages[k]])
    return result
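_get_dependencies is likewise not shown on this page; a hypothetical stand-in consistent with how it is called (name lookup through the dependency maps, optionally recursive) might be:

def _get_dependencies(dependency_dict, name, packages, recursive=False, _seen=None):
    # Hypothetical stand-in: map dependency names to debianized package
    # names via the packages dict, optionally following the chain;
    # _seen guards against dependency cycles.
    _seen = _seen if _seen is not None else set()
    result = set()
    for d in dependency_dict.get(name, []):
        if d in packages and d not in _seen:
            _seen.add(d)
            result.add(packages[d])
            if recursive:
                result |= _get_dependencies(dependency_dict, d, packages, True, _seen)
    return result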
Example 6
def dry_binarydeb_jobs(stackname, dry_maintainers, rosdistro, distros, arches,
                       fqdn, jobgraph, packages_for_sync, ssh_key_id):
    jenkins_config = jenkins_support.load_server_config_file(
        jenkins_support.get_default_catkin_debs_config())
    package = debianize_package_name(rosdistro, stackname)
    d = dict(FQDN=fqdn,
             PACKAGE=package,
             ROSDISTRO=rosdistro,
             STACK_NAME=stackname,
             NOTIFICATION_EMAIL=' '.join(dry_maintainers),
             USERNAME=jenkins_config.username,
             IS_METAPACKAGES=(stackname == 'metapackages'),
             PACKAGES_FOR_SYNC=str(packages_for_sync),
             SSH_KEY_ID=ssh_key_id)
    jobs = []
    for distro in distros:
        for arch in arches:
            d['ARCH'] = arch
            d['DISTRO'] = distro

            d["CHILD_PROJECTS"] = calc_child_jobs(package, distro, arch,
                                                  jobgraph)
            d["DEPENDENTS"] = "True"
            if stackname == 'sync':
                d["CHILD_PROJECTS"] = []  # Sync jobs are final
                d['DISTROS'] = distros
                d['ARCHES'] = arches
                config = create_sync_binarydeb_config(d)
            else:
                config = create_dry_binarydeb_config(d)
            #print(config)
            job_name = binarydeb_job_name(package, distro, arch)
            jobs.append((job_name, config))
            #print ("config of %s is %s" % (job_name, config))
    return jobs
Example 8
def debianize_package_name(self, package_name):
    return debianize_package_name(self._rosdistro, package_name)
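Example 8 is a method on a class that stores the rosdistro; it is a thin wrapper around the module-level helper that every example on this page calls. That helper itself is not shown here; a minimal sketch of what it presumably does, stated as an assumption rather than the verbatim implementation:

def debianize_package_name(rosdistro, name):
    # Assumed naming scheme: 'ros-<rosdistro>-<name>' with underscores
    # replaced by hyphens; not necessarily the verbatim implementation.
    return 'ros-%s-%s' % (rosdistro, name.replace('_', '-'))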
Example 9
def compute_missing(distros, arches, fqdn, rosdistro, sourcedeb_only=False):
    """ Compute what packages are missing from a repo based on the rosdistro files, both wet and dry. """

    repo_url = 'http://%s/repos/building' % fqdn

    if rosdistro != 'fuerte':
        from ros_distro import Rosdistro
    else:
        from ros_distro_fuerte import Rosdistro
    rd = Rosdistro(rosdistro)
    # Use the explicitly requested target distros if given, otherwise
    # fall back to the rosdistro defaults.

    if distros:
        target_distros = distros
    else:
        target_distros = rd.get_target_distros()

    missing = {}
    for short_package_name in rd.get_package_list():
        #print ('Analyzing WET stack "%s" for "%s"' % (r['url'], target_distros))

        # todo check if sourcedeb is present with the right version
        deb_name = debianize_package_name(rosdistro, short_package_name)
        expected_version = rd.get_version(short_package_name, full_version=True)

        # Don't report packages as missing if their version is None
        if not expected_version:
            print("Skipping package %s with no version" % short_package_name)
            continue

        missing[short_package_name] = []
        for d in target_distros:
            if not repo.deb_in_repo(repo_url, deb_name, str(expected_version) + d, d, arch='na', source=True):
                missing[short_package_name].append('%s_source' % d)
            if not sourcedeb_only:
                for a in arches:
                    if not repo.deb_in_repo(repo_url, deb_name, str(expected_version) + ".*", d, a):
                        missing[short_package_name].append('%s_%s' % (d, a))

    if not sourcedeb_only:
        # dry stacks and their dependencies
        dist = load_distro(distro_uri(rosdistro))

        distro_arches = []
        for d in target_distros:
            for a in arches:
                distro_arches.append((d, a))

        for s in dist.stacks:
            #print ("Analyzing DRY job [%s]" % s)
            expected_version = dry_get_stack_version(s, dist)
            # Don't report packages as missing if their version is None
            if not expected_version:
                print("Skipping package %s with no version" % s)
                continue
            missing[s] = []
            # for each distro/arch pair, check whether the deb is present;
            # if not, record it as missing so a build can be triggered
            for (d, a) in distro_arches:
                if not repo.deb_in_repo(repo_url, debianize_package_name(rosdistro, s), expected_version + ".*", d, a):
                    missing[s].append('%s_%s' % (d, a))

    return missing
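A hedged invocation sketch (hostname and target lists are placeholders; repo, load_distro and the other module-level imports must be available). Examples 10 and 11 below are later revisions of this same function, gating the dry-stack pass on the groovy rosdistro and adding non-Ubuntu platform support:

missing = compute_missing(distros=['precise'],
                          arches=['amd64', 'i386'],
                          fqdn='repos.example.org',
                          rosdistro='groovy')
for pkg, targets in sorted(missing.items()):
    if targets:
        print('%s is missing on: %s' % (pkg, ', '.join(targets)))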
Example 10
def compute_missing(distros, arches, fqdn, rosdistro, sourcedeb_only=False):
    """ Compute what packages are missing from a repo based on the rosdistro files, both wet and dry. """

    repo_url = 'http://%s/repos/building' % fqdn

    if rosdistro != 'fuerte':
        from ros_distro import Rosdistro
    else:
        from ros_distro_fuerte import Rosdistro
    rd = Rosdistro(rosdistro)
    # Use the explicitly requested target distros if given, otherwise
    # fall back to the rosdistro defaults.

    if distros:
        target_distros = distros
    else:
        target_distros = rd.get_target_distros()

    missing = {}
    for short_package_name in rd.get_package_list():
        #print ('Analyzing WET stack "%s" for "%s"' % (r['url'], target_distros))

        # todo check if sourcedeb is present with the right version
        deb_name = debianize_package_name(rosdistro, short_package_name)
        expected_version = rd.get_version(short_package_name,
                                          full_version=True)

        # Don't report packages as missing if their version is None
        if not expected_version:
            print("Skipping package %s with no version" % short_package_name)
            continue

        missing[short_package_name] = []
        for d in target_distros:
            if not repo.deb_in_repo(repo_url,
                                    deb_name,
                                    str(expected_version) + d,
                                    d,
                                    arch='na',
                                    source=True):
                missing[short_package_name].append('%s_source' % d)
            if not sourcedeb_only:
                for a in arches:
                    if not repo.deb_in_repo(repo_url, deb_name,
                                            str(expected_version) + ".*", d,
                                            a):
                        missing[short_package_name].append('%s_%s' % (d, a))

    if not sourcedeb_only and rosdistro == 'groovy':
        # dry stacks and their dependencies
        dist = load_distro(distro_uri(rosdistro))

        distro_arches = []
        for d in target_distros:
            for a in arches:
                distro_arches.append((d, a))

        for s in dist.stacks:
            #print ("Analyzing DRY job [%s]" % s)
            expected_version = dry_get_stack_version(s, dist)
            # Don't report packages as missing if their version is None
            if not expected_version:
                print("Skipping package %s with no version" % s)
                continue
            missing[s] = []
            # for each distro/arch pair, check whether the deb is present;
            # if not, record it as missing so a build can be triggered
            for (d, a) in distro_arches:
                if not repo.deb_in_repo(repo_url,
                                        debianize_package_name(rosdistro, s),
                                        expected_version + ".*", d, a):
                    missing[s].append('%s_%s' % (d, a))

    return missing
Example 11
def compute_missing(distros, arches, fqdn, rosdistro, sourcepkg_only=False, platform='ubuntu'):
    """ Compute what packages are missing from a repo based on the rosdistro files, both wet and dry. """

    repo_url = 'http://%s/repos/building/%s' % (fqdn, platform)

    # TODO Building Repo Workaround
    if platform == 'ubuntu':
        repo_url = 'http://%s/repos/building' % fqdn
    elif platform == 'fedora':
        repo_url = 'http://%s/smd-ros-building/%s' % (fqdn, platform)
    # End Workaround

    if rosdistro != 'fuerte':
        from ros_distro import Rosdistro
    else:
        from ros_distro_fuerte import Rosdistro
    rd = Rosdistro(rosdistro)
    # Use the explicitly requested target distros if given, otherwise
    # fall back to the rosdistro defaults.

    if distros:
        target_distros = distros
    else:
        target_distros = rd.get_target_distros()[platform]

    missing = {}
    for short_package_name in rd.get_package_list():
        #print ('Analyzing WET stack "%s" for "%s"' % (r['url'], target_distros))

        # todo check if sourcepkg is present with the right version
        pkg_name = debianize_package_name(rosdistro, short_package_name)
        expected_version = rd.get_version(short_package_name, full_version=True)

        # Don't report packages as missing if their version is None
        if not expected_version:
            print("Skipping package %s with no version" % short_package_name)
            continue

        missing[short_package_name] = []
        for d in target_distros:
            if platform == 'fedora':
                version_extra = ".fc" + str(get_fedora_ver(d))
            else:
                version_extra = ".*"
            if not repo.pkg_in_repo(repo_url, pkg_name, get_full_version(expected_version, d, platform), d, arch='na', source=True, platform=platform):
                missing[short_package_name].append('%s_source' % d)
            if not sourcepkg_only:
                for a in arches:
                    if not repo.pkg_in_repo(repo_url, pkg_name, str(expected_version) + version_extra, d, a, platform=platform):
                        missing[short_package_name].append('%s_%s' % (d, a))

    if not sourcepkg_only and rosdistro == 'groovy' and not platform == 'fedora':
        # dry stacks and their dependencies
        dist = load_distro(distro_uri(rosdistro))

        distro_arches = []
        for d in target_distros:
            for a in arches:
                distro_arches.append((d, a))

        for s in dist.stacks:
            #print ("Analyzing DRY job [%s]" % s)
            expected_version = dry_get_stack_version(s, dist)
            # Don't report packages as missing if their version is None
            if not expected_version:
                print("Skipping package %s with no version" % s)
                continue
            missing[s] = []
            # for each distro/arch pair, check whether the pkg is present;
            # if not, record it as missing so a build can be triggered
            for (d, a) in distro_arches:
                if not repo.pkg_in_repo(repo_url, debianize_package_name(rosdistro, s), expected_version + ".*", d, a, platform=platform):
                    missing[s].append('%s_%s' % (d, a))

    return missing
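get_full_version is not shown either; a hypothetical stand-in that mirrors the version-suffix logic visible in Examples 9 through 11 (distro codename suffix for Debian source packages, an .fcNN release suffix for Fedora):

def get_full_version(version, distro, platform):
    # Hypothetical stand-in mirroring the suffix logic visible above:
    # Debian source versions carry the distro codename, Fedora versions
    # an '.fcNN' release suffix.
    if platform == 'fedora':
        return str(version) + '.fc' + str(get_fedora_ver(distro))
    return str(version) + distro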