Code example #1
File: release_jobs.py  Project: ijt/catkin-debs
def dry_generate_jobgraph(rosdistro):
    if rosdistro == 'backports':
        return {}

    (stack_depends, versions) = dry_get_versioned_dependency_tree(rosdistro)
    
    jobgraph = {}
    for key, val in stack_depends.iteritems():
        jobgraph[debianize_package_name(rosdistro, key)] = [debianize_package_name(rosdistro, p) for p in val ]
    return jobgraph
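
Every listing on this page calls debianize_package_name (and some variants also call sanitize_package_name) without ever showing them. A minimal sketch of what they are assumed to do, based on the usual ROS Debian naming convention where a package foo_bar released for groovy ships as ros-groovy-foo-bar; the real implementations live in the catkin-debs sources:

# Assumed helper bodies, following the ros-<rosdistro>-<name> convention.
def sanitize_package_name(name):
    # Debian package names cannot contain underscores or uppercase letters.
    return name.lower().replace('_', '-')

def debianize_package_name(rosdistro, name):
    return 'ros-%s-%s' % (rosdistro, sanitize_package_name(name))

print(debianize_package_name('groovy', 'roscpp_core'))  # ros-groovy-roscpp-core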
Code example #2
def dry_generate_jobgraph(rosdistro):
    if rosdistro == 'backports':
        return {}

    (stack_depends, versions) = dry_get_versioned_dependency_tree(rosdistro)

    jobgraph = {}
    for key, val in stack_depends.iteritems():
        jobgraph[debianize_package_name(rosdistro, key)] = [
            debianize_package_name(rosdistro, p) for p in val
        ]
    return jobgraph
Code example #3
File: dependency_walker.py  Project: po1/buildfarm
def get_jenkins_dependencies(rosdistro, packages):
    result = {}
    for pkg_name in sorted(packages.keys()):
        p = packages[pkg_name]
        deb_name = debianize_package_name(rosdistro, p.name)
        build_depends = _get_depends(packages, p, recursive=False, buildtime=True)
        run_depends = _get_depends(packages, p, recursive=False, buildtime=False)

        # Only set first-level dependencies (rather than the recursive ones)
        # to reduce clutter in Jenkins.
        result[deb_name] = [debianize_package_name(rosdistro, d.name) for d in build_depends | run_depends]

    return result
Code example #4
File: release_jobs.py  Project: jamuraa/catkin-debs
def dry_generate_jobgraph(rosdistro, wet_jobgraph, stack_depends):
    jobgraph = {}
    for key, val in stack_depends.iteritems():
        dry_depends = [debianize_package_name(rosdistro, p) for p in val]

        untracked_wet_packages = [p for p in dry_depends if p in wet_jobgraph]

        extra_packages = set()
        for p in untracked_wet_packages:
            #print("adding packages for %s - [%s] " % (p, ', '.join(wet_jobgraph[p])) )
            extra_packages.update(wet_jobgraph[p])

        jobgraph[debianize_package_name(rosdistro, key)] = dry_depends + list(extra_packages)
    return jobgraph
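
Unlike examples #1 and #2, this variant also splices the dependencies of wet (catkin) jobs into each dry entry. A hypothetical call, assuming the naming helpers sketched after example #1 (all package names invented):

wet_jobgraph = {'ros-groovy-roscpp': ['ros-groovy-catkin']}
stack_depends = {'navigation': ['roscpp']}
print(dry_generate_jobgraph('groovy', wet_jobgraph, stack_depends))
# {'ros-groovy-navigation': ['ros-groovy-roscpp', 'ros-groovy-catkin']}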
Code example #5
def get_dependencies(rosdistro, stacks):
    packages = {}
    build_dependencies = {}
    runtime_dependencies = {}

    for name in sorted(stacks.keys()):
        stack = stacks[name]

        if stack is None:
            packages[name] = sanitize_package_name(name)
            build_dependencies[name] = []
            runtime_dependencies[name] = []
        else:
            catkin_project_name = stack.name
            packages[catkin_project_name] = debianize_package_name(rosdistro, catkin_project_name)
            build_dependencies[catkin_project_name] = [d.name for d in stack.build_depends]
            runtime_dependencies[catkin_project_name] = [d.name for d in stack.depends]

    result = {}
    # combines direct buildtime- and recursive runtime-dependencies
    for k in packages.keys():
        #print '\nDependencies for: ', k
        build_deps = _get_dependencies(build_dependencies, k, packages)
        # recursive runtime depends of build depends
        recursive_runtime_dependencies = _get_dependencies(runtime_dependencies, k, packages, True)
        #print 'Recursive runtime-dependencies:', ', '.join(recursive_runtime_dependencies)
        result[packages[k]] = build_deps | recursive_runtime_dependencies
        #print 'Combined dependencies:', ', '.join(result[packages[k]])
    return result
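
The private helper _get_dependencies is not shown in any listing. A sketch of the behaviour the calls above imply, offered as an assumption rather than the project's code: resolve a name's direct dependencies to their Debian names via the packages map, skip anything untracked, and optionally walk dependencies of dependencies.

def _get_dependencies(dependency_dict, name, packages, recursive=False, _seen=None):
    # _seen guards against dependency cycles; the signature is hypothetical.
    _seen = _seen if _seen is not None else set()
    result = set()
    for dep in dependency_dict.get(name, []):
        if dep not in packages or dep in _seen:
            continue
        _seen.add(dep)
        result.add(packages[dep])
        if recursive:
            result |= _get_dependencies(dependency_dict, dep, packages, True, _seen)
    return result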
Code example #6
def get_dependencies(workspace, repository_dict, rosdistro):
    build_dependencies = {}
    runtime_dependencies = {}

    packages = {}
    package_urls = {}

    #print repository_dict
    for name, r in sorted(repository_dict.items()):
        if 'url' not in r:
            print "'url' key missing for repository %s; skipping"%(r)
            continue
        url = r['url']
        print "downloading from %s into %s to be able to trace dependencies" % (url, workspace)
        try:
            stack = get_stack_of_remote_repository(name, 'git', url, workspace)
        except IOError, e:
            if rosdistro == 'backports':
                packages[name] = sanitize_package_name(name)
                build_dependencies[name] = []
                runtime_dependencies[name] = []
                package_urls[name] = url
                print "Processing backport %s, no package.xml file found in repo %s. Continuing"%(name, url)
            else:
                print str(e)
            continue

        catkin_project_name = stack.name

        packages[catkin_project_name] = debianize_package_name(rosdistro, catkin_project_name)

        build_dependencies[catkin_project_name] = [d.name for d in stack.build_depends]
        runtime_dependencies[catkin_project_name] = [d.name for d in stack.depends]

        package_urls[catkin_project_name] = url
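
The listing above ends before its return statement. Its IOError branch only makes sense given what get_stack_of_remote_repository is expected to do; a hedged sketch, with the checkout layout and the manifest parser assumed:

import os
import subprocess

def get_stack_of_remote_repository(name, vcs_type, url, workspace):
    # Fetch the repository into the workspace and parse its stack manifest.
    # Raising IOError when no manifest exists is what lets the caller treat
    # the repository as a possible backport.
    checkout = os.path.join(workspace, name)
    if not os.path.isdir(checkout):
        subprocess.check_call([vcs_type, 'clone', url, checkout])
    manifest = os.path.join(checkout, 'stack.xml')
    if not os.path.isfile(manifest):
        raise IOError('no stack.xml found in %s' % url)
    return parse_stack_xml(manifest)  # parse_stack_xml is hypothetical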
Code example #7
def dry_binarydeb_jobs(stackname, rosdistro, distros, jobgraph):
    jenkins_config = jenkins_support.load_server_config_file(
        jenkins_support.get_default_catkin_debs_config())
    package = debianize_package_name(rosdistro, stackname)
    d = dict(PACKAGE=package,
             ROSDISTRO=rosdistro,
             STACK_NAME=stackname,
             USERNAME=jenkins_config.username)
    jobs = []
    for distro in distros:
        for arch in (
                'i386', 'amd64'
        ):  # removed 'armel' since qemu debootstrap is segfaulting
            d['ARCH'] = arch
            d['DISTRO'] = distro

            d["CHILD_PROJECTS"] = calc_child_jobs(package, distro, arch,
                                                  jobgraph)
            d["DEPENDENTS"] = "True"
            config = create_dry_binarydeb_config(d)
            #print(config)
            job_name = binarydeb_job_name(package, distro, arch)
            jobs.append((job_name, config))
            #print ("config of %s is %s" % (job_name, config))
    return jobs
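
Two helpers here are assumed rather than shown. Hedged sketches, presuming the common <package>_binarydeb_<distro>_<arch> Jenkins naming scheme and that CHILD_PROJECTS is a comma-separated list of downstream jobs:

def binarydeb_job_name(package, distro, arch):
    # e.g. ros-groovy-navigation_binarydeb_precise_amd64 (assumed scheme)
    return '%s_binarydeb_%s_%s' % (package, distro, arch)

def calc_child_jobs(package, distro, arch, jobgraph):
    # A child job is any job whose dependency list names this package.
    children = []
    for parent, deps in (jobgraph or {}).items():
        if package in deps:
            children.append(binarydeb_job_name(parent, distro, arch))
    return ','.join(sorted(children))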
Code example #8
def get_dependencies(rosdistro, stacks):
    packages = {}
    build_dependencies = {}
    runtime_dependencies = {}

    for name in sorted(stacks.keys()):
        stack = stacks[name]

        if stack is None:
            packages[name] = sanitize_package_name(name)
            build_dependencies[name] = []
            runtime_dependencies[name] = []
        else:
            catkin_project_name = stack.name
            packages[catkin_project_name] = debianize_package_name(
                rosdistro, catkin_project_name)
            build_dependencies[catkin_project_name] = [
                d.name for d in stack.build_depends
            ]
            runtime_dependencies[catkin_project_name] = [
                d.name for d in stack.depends
            ]

    result = {}
    # combines direct buildtime- and recursive runtime-dependencies
    for k in packages.keys():
        #print '\nDependencies for: ', k
        build_deps = _get_dependencies(build_dependencies, k, packages)
        # recursive runtime depends of build depends
        recursive_runtime_dependencies = _get_dependencies(
            runtime_dependencies, k, packages, True)
        #print 'Recursive runtime-dependencies:', ', '.join(recursive_runtime_dependencies)
        result[packages[k]] = build_deps | recursive_runtime_dependencies
        #print 'Combined dependencies:', ', '.join(result[packages[k]])
    return result
Code example #9
File: release_jobs.py  Project: jamuraa/catkin-debs
def dry_binarydeb_jobs(stackname, dry_maintainers, rosdistro, distros, arches, fqdn, jobgraph, packages_for_sync):
    jenkins_config = jenkins_support.load_server_config_file(jenkins_support.get_default_catkin_debs_config())
    package = debianize_package_name(rosdistro, stackname)
    d = dict(
        FQDN=fqdn,
        PACKAGE=package,
        ROSDISTRO=rosdistro,
        STACK_NAME=stackname,
        NOTIFICATION_EMAIL=' '.join(dry_maintainers),
        USERNAME=jenkins_config.username,
        IS_METAPACKAGES=(stackname == 'metapackages'),
        PACKAGES_FOR_SYNC=str(packages_for_sync)
    )
    jobs = []
    for distro in distros:
        for arch in arches:
            d['ARCH'] = arch
            d['DISTRO'] = distro

            d["CHILD_PROJECTS"] = calc_child_jobs(package, distro, arch, jobgraph)
            d["DEPENDENTS"] = "True"
            config = create_dry_binarydeb_config(d)
            #print(config)
            job_name = binarydeb_job_name(package, distro, arch)
            jobs.append((job_name, config))
            #print ("config of %s is %s" % (job_name, config))
    return jobs
Code example #10
def get_jenkins_dependencies(rosdistro, packages):
    result = {}
    for pkg_name in sorted(packages.keys()):
        p = packages[pkg_name]
        deb_name = debianize_package_name(rosdistro, p.name)
        build_depends = _get_depends(packages,
                                     p,
                                     recursive=False,
                                     buildtime=True)
        run_depends = _get_depends(packages,
                                   p,
                                   recursive=False,
                                   buildtime=False)

        # Only set first-level dependencies (rather than the recursive ones)
        # to reduce clutter in Jenkins.
        result[deb_name] = [
            debianize_package_name(rosdistro, d.name)
            for d in build_depends | run_depends
        ]

    return result
Code example #11
File: release_jobs.py  Project: ijt/catkin-debs
def get_missing_wet_packages(repo_map, default_distros, rosdistro, repo_url,
                             arches):
    # We take the intersection of repo-specific targets with default
    # targets.
    missing = {}
    for short_package_name, r in repo_map['repositories'].items():
        if 'url' not in r:
            print('"url" key missing for repository "%s"; skipping' % r)
            continue
        if 'target' not in r or r['target'] == 'all':
            target_distros = default_distros
        else:
            target_distros = list(set(r['target']) & set(default_distros))

        #print ('Analyzing WET stack "%s" for "%s"' % (r['url'], target_distros))
        
        # todo check if sourcedeb is present with the right version
        deb_name = debianize_package_name(rosdistro, short_package_name)
        if 'version' not in r:
            print('"version" key missing for repository %s; skipping' % r)
            continue
        expected_version = r['version']
        if not expected_version:
            expected_version = ''

        missing_source_pkg_names = [
            '%s_source' % d
            for d in target_distros
            if not repo.deb_in_repo(repo_url, deb_name, expected_version+".*", d, arch='na', source=True)
        ]
        missing_binary_pkg_names = [
            '%s_%s' % (d, a)
            for d in target_distros
            for a in arches
            if not repo.deb_in_repo(repo_url, deb_name, expected_version+".*", d, a)
        ]
        
        missing[short_package_name] = missing_source_pkg_names + missing_binary_pkg_names

        # if not trigger sourcedeb

        # else if binaries don't exist trigger them
        for d in target_distros:
            for a in arches:
                pass  # missing[short_package_name] = ['source']

    return missing
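
A hypothetical call (host and repository map invented); the result maps each short package name to the source and binary targets whose deb is still absent:

missing = get_missing_wet_packages(repo_map, ['precise'], 'groovy',
                                   'http://repos.example.com/repos/building',
                                   ['amd64', 'i386'])
# e.g. {'navigation': ['precise_source', 'precise_amd64', 'precise_i386']}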
Code example #12
File: release_jobs.py  Project: ijt/catkin-debs
def get_missing_dry_packages(rosdistro, default_distros, arches, repo_url):
    missing = {}
    dist = load_distro(distro_uri(rosdistro))

    distro_arches = [(d, a) for d in default_distros for a in arches]

    for s in dist.stacks:
        #print ("Analyzing DRY job [%s]" % s)
        expected_version = dry_get_stack_version(s, dist)
        
        # sanitize undeclared versions for string substitution
        if not expected_version:
            expected_version = ''
        missing[s] = []
        # for each distro arch check if the deb is present. If not trigger the build. 
        for (d, a) in distro_arches:
            if not repo.deb_in_repo(repo_url, debianize_package_name(rosdistro, s), expected_version+".*", d, a):
                missing[s].append('%s_%s' % (d, a))
    return missing
Code example #13
File: release_jobs.py  Project: ijt/catkin-debs
def dry_binarydeb_jobs(stackname, rosdistro, distros, jobgraph):
    jenkins_config = jenkins_support.load_server_config_file(jenkins_support.get_default_catkin_debs_config())
    package = debianize_package_name(rosdistro, stackname)
    d = dict(
        PACKAGE=package,
        ROSDISTRO=rosdistro,
        STACK_NAME=stackname,
        USERNAME=jenkins_config.username
    )
    jobs = []
    for distro in distros:
        for arch in ('i386', 'amd64'):  # removed 'armel' since qemu debootstrap is segfaulting
            d['ARCH'] = arch
            d['DISTRO'] = distro

            d["CHILD_PROJECTS"] = calc_child_jobs(package, distro, arch, jobgraph)
            d["DEPENDENTS"] = "True"
            config = create_dry_binarydeb_config(d)
            #print(config)
            job_name = binarydeb_job_name(package, distro, arch)
            jobs.append((job_name, config))
            #print ("config of %s is %s" % (job_name, config))
    return jobs
Code example #14
def compute_missing(distros, fqdn, rosdistro):
    """ Compute what packages are missing from a repo based on the rosdistro files, both wet and dry. """

    URL_PROTOTYPE = 'https://raw.github.com/ros/rosdistro/master/releases/%s.yaml'

    repo_url = 'http://%s/repos/building' % fqdn

    print('Fetching "%s"' % (URL_PROTOTYPE % rosdistro))
    repo_map = yaml.load(urllib2.urlopen(URL_PROTOTYPE % rosdistro))

    # What ROS distro are we configuring?
    if 'release-name' not in repo_map:
        print('No "release-name" key in yaml file')
        sys.exit(1)
    if repo_map['release-name'] != rosdistro:
        print('release-name mismatch (%s != %s)' %
              (repo_map['release-name'], rosdistro))
        sys.exit(1)
    if 'repositories' not in repo_map:
        print('No "repositories" key in yaml file')
    if 'type' not in repo_map or repo_map['type'] != 'gbp':
        print('Wrong type value in yaml file')
        sys.exit(1)

    # Figure out default distros.  Command-line arg takes precedence; if
    # it's not specified, then read targets.yaml.
    if distros:
        default_distros = distros
    else:
        print('Fetching "%s"' % (URL_PROTOTYPE % 'targets'))
        targets_map = yaml.load(urllib2.urlopen(URL_PROTOTYPE % 'targets'))
        my_targets = [x for x in targets_map if rosdistro in x]
        if len(my_targets) != 1:
            print(
                'Must have exactly one entry for rosdistro "%s" in targets.yaml'
                % rosdistro)
            sys.exit(1)
        default_distros = my_targets[0][rosdistro]

    arches = ['amd64', 'i386']

    # We take the intersection of repo-specific targets with default
    # targets.
    missing = {}
    for short_package_name, r in repo_map['repositories'].items():
        if 'url' not in r:
            print('"url" key missing for repository "%s"; skipping' % r)
            continue
        url = r['url']
        if 'target' not in r or r['target'] == 'all':
            target_distros = default_distros
        else:
            target_distros = list(set(r['target']) & set(default_distros))

        #print ('Analyzing WET stack "%s" for "%s"' % (r['url'], target_distros))

        # todo check if sourcedeb is present with the right version
        deb_name = debianize_package_name(rosdistro, short_package_name)
        if 'version' not in r:
            print('"version" key missing for repository %s; skipping' % r)
            continue
        expected_version = r['version']
        if not expected_version:
            expected_version = ''

        missing[short_package_name] = []
        for d in target_distros:
            if not repo.deb_in_repo(repo_url,
                                    deb_name,
                                    expected_version + ".*",
                                    d,
                                    arch='na',
                                    source=True):
                missing[short_package_name].append('%s_source' % d)
            for a in arches:
                if not repo.deb_in_repo(repo_url, deb_name,
                                        expected_version + ".*", d, a):
                    missing[short_package_name].append('%s_%s' % (d, a))

        # if not trigger sourcedeb

        # else if binaries don't exist trigger them
        for d in target_distros:
            for a in arches:
                pass  # missing[short_package_name] = ['source']

    #dry stacks
    # dry dependencies
    dist = load_distro(distro_uri(rosdistro))

    distro_arches = []
    for d in default_distros:
        for a in arches:
            distro_arches.append((d, a))

    for s in dist.stacks:
        #print ("Analyzing DRY job [%s]" % s)
        expected_version = dry_get_stack_version(s, dist)

        # sanitize undeclared versions for string substitution
        if not expected_version:
            expected_version = ''
        missing[s] = []
        # for each distro arch check if the deb is present. If not trigger the build.
        for (d, a) in distro_arches:
            if not repo.deb_in_repo(repo_url,
                                    debianize_package_name(rosdistro, s),
                                    expected_version + ".*", d, a):
                missing[s].append('%s_%s' % (d, a))

    return missing
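
A hypothetical driver for the function above (host name invented):

missing = compute_missing(['precise'], 'repos.example.com', 'groovy')
for name, targets in sorted(missing.items()):
    if targets:
        print('%s still needs: %s' % (name, ', '.join(targets)))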
Code example #15
File: release_jobs.py  Project: ahendrix/catkin-debs
def compute_missing(distros, fqdn, rosdistro):
    """ Compute what packages are missing from a repo based on the rosdistro files, both wet and dry. """

    URL_PROTOTYPE = 'https://raw.github.com/ros/rosdistro/master/releases/%s.yaml'

    repo_url = 'http://%s/repos/building' % fqdn

    print('Fetching "%s"' % (URL_PROTOTYPE % rosdistro))
    repo_map = yaml.load(urllib2.urlopen(URL_PROTOTYPE % rosdistro))

    # What ROS distro are we configuring?
    if 'release-name' not in repo_map:
        print('No "release-name" key in yaml file')
        sys.exit(1)
    if repo_map['release-name'] != rosdistro:
        print('release-name mismatch (%s != %s)' % (repo_map['release-name'], rosdistro))
        sys.exit(1)    
    if 'repositories' not in repo_map:
        print('No "repositories" key in yaml file')
    if 'type' not in repo_map or repo_map['type'] != 'gbp':
        print('Wrong type value in yaml file')
        sys.exit(1)

    # Figure out default distros.  Command-line arg takes precedence; if
    # it's not specified, then read targets.yaml.
    if distros:
        default_distros = distros
    else:
        print('Fetching "%s"' % (URL_PROTOTYPE % 'targets'))
        targets_map = yaml.load(urllib2.urlopen(URL_PROTOTYPE % 'targets'))
        my_targets = [x for x in targets_map if rosdistro in x]
        if len(my_targets) != 1:
            print('Must have exactly one entry for rosdistro "%s" in targets.yaml' % rosdistro)
            sys.exit(1)
        default_distros = my_targets[0][rosdistro]

    arches = ['amd64', 'i386']

    # We take the intersection of repo-specific targets with default
    # targets.
    missing = {}
    for short_package_name, r in repo_map['repositories'].items():
        if 'url' not in r:
            print('"url" key missing for repository "%s"; skipping' % r)
            continue
        url = r['url']
        if 'target' not in r or r['target'] == 'all':
            target_distros = default_distros
        else:
            target_distros = list(set(r['target']) & set(default_distros))

        #print ('Analyzing WET stack "%s" for "%s"' % (r['url'], target_distros))
        
        # todo check if sourcedeb is present with the right version
        deb_name = debianize_package_name(rosdistro, short_package_name)
        if 'version' not in r:
            print('"version" key missing for repository %s; skipping' % r)
            continue
        expected_version = r['version']
        if not expected_version:
            expected_version = ''
        
        missing[short_package_name] = []
        for d in target_distros:
            if not repo.deb_in_repo(repo_url, deb_name, expected_version+".*", d, arch='na', source=True):
                missing[short_package_name].append('%s_source' % d)
            for a in arches:
                if not repo.deb_in_repo(repo_url, deb_name, expected_version+".*", d, a):
                    missing[short_package_name].append('%s_%s' % (d, a))

        # if not trigger sourcedeb

        # else if binaries don't exist trigger them
        for d in target_distros:
            for a in arches:
                pass  # missing[short_package_name] = ['source']

    #dry stacks
    # dry dependencies
    dist = load_distro(distro_uri(rosdistro))

    distro_arches = []
    for d in default_distros:
        for a in arches:
            distro_arches.append((d, a))

    for s in dist.stacks:
        #print ("Analyzing DRY job [%s]" % s)
        expected_version = dry_get_stack_version(s, dist)
        
        # sanitize undeclared versions for string substitution
        if not expected_version:
            expected_version = ''
        missing[s] = []
        # for each distro arch check if the deb is present. If not trigger the build. 
        for (d, a) in distro_arches:
            if not repo.deb_in_repo(repo_url, debianize_package_name(rosdistro, s), expected_version+".*", d, a):
                missing[s].append('%s_%s' % (d, a))

    return missing
Code example #16
File: release_jobs.py  Project: jamuraa/catkin-debs
def compute_missing(distros, arches, fqdn, rosdistro, sourcedeb_only=False):
    """ Compute what packages are missing from a repo based on the rosdistro files, both wet and dry. """

    repo_url = 'http://%s/repos/building' % fqdn

    rd = Rosdistro(rosdistro)
    # We take the intersection of repo-specific targets with default
    # targets.

    if distros:
        target_distros = distros
    else:
        target_distros = rd.get_target_distros()

    missing = {}
    for short_package_name in rd.get_package_list():
        #print ('Analyzing WET stack "%s" for "%s"' % (r['url'], target_distros))

        # todo check if sourcedeb is present with the right version
        deb_name = debianize_package_name(rosdistro, short_package_name)
        expected_version = rd.get_version(short_package_name, full_version=True)

        # Don't report packages as missing if their version is None
        if not expected_version:
            print("Skipping package %s with no version" % short_package_name)
            continue

        missing[short_package_name] = []
        for d in target_distros:
            if not repo.deb_in_repo(repo_url, deb_name, str(expected_version) + d, d, arch='na', source=True):
                missing[short_package_name].append('%s_source' % d)
            if not sourcedeb_only:
                for a in arches:
                    if not repo.deb_in_repo(repo_url, deb_name, str(expected_version) + ".*", d, a):
                        missing[short_package_name].append('%s_%s' % (d, a))

    if not sourcedeb_only:
        #dry stacks
        # dry dependencies
        dist = load_distro(distro_uri(rosdistro))

        distro_arches = []
        for d in target_distros:
            for a in arches:
                distro_arches.append((d, a))

        for s in dist.stacks:
            #print ("Analyzing DRY job [%s]" % s)
            expected_version = dry_get_stack_version(s, dist)
            # Don't report packages as missing if their version is None
            if not expected_version:
                print("Skipping package %s with no version" % s)
                continue
            missing[s] = []
            # for each distro arch check if the deb is present. If not trigger the build.
            for (d, a) in distro_arches:
                if not repo.deb_in_repo(repo_url, debianize_package_name(rosdistro, s), expected_version + ".*", d, a):
                    missing[s].append('%s_%s' % (d, a))

    return missing