Example #1
File: dump.py Project: Linutronix/elbe
def get_initvm_pkglist():
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", category=DeprecationWarning)
        cache = Cache()
        cache.open()
        pkglist = [APTPackage(p) for p in cache if p.is_installed]

    return pkglist
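
A quick usage sketch (an assumption, not part of the original file): the `APTPackage` objects used throughout these examples expose `name` and `installed_version`, so the returned list can be inspected like this:

for pkg in get_initvm_pkglist():          # hypothetical usage
    print(pkg.name, pkg.installed_version)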
Example #2
    def __init__(
            self,
            rfs,
            log,
            arch,
            notifier=None,
            norecommend=False,
            noauth=True):

        # pylint: disable=too-many-arguments

        sys.stdout = open(log, 'a', buffering=0)
        sys.stderr = open(log, 'a', buffering=0)
        self.logfile = open(log, 'a', buffering=0)

        InChRootObject.__init__(self, rfs)

        self.notifier = notifier
        config.set("APT::Architecture", arch)
        if norecommend:
            config.set("APT::Install-Recommends", "0")
        else:
            config.set("APT::Install-Recommends", "1")

        if noauth:
            config.set("APT::Get::AllowUnauthenticated", "1")
        else:
            config.set("APT::Get::AllowUnauthenticated", "0")

        self.cache = Cache(progress=ElbeOpProgress())
        self.cache.open(progress=ElbeOpProgress())
Example #3
 def _update_datas(self):
     """
         Refresh the table's data when an update has been called
     """
     self.cache = Cache()
     self.packages = []
     self.to_install = []
     self._populate()
Example #4
File: cdroms.py Project: atoz-chevara/elbe
def mk_source_cdrom(rfs, arch, codename, init_codename, target, log, cdrom_size=CDROM_SIZE):

    hostfs.mkdir_p( '/var/cache/elbe/sources' )
    rfs.mkdir_p( '/var/cache/elbe/sources' )

    repo = CdromSrcRepo( codename, init_codename,
                         os.path.join( target, "srcrepo" ),
                         log,
                         cdrom_size )

    cache = get_rpcaptcache( rfs, "aptcache.log", arch )

    pkglist = cache.get_installed_pkgs()

    for pkg in pkglist:
        try:
            dsc = cache.download_source( pkg.name, '/var/cache/elbe/sources' )
            repo.includedsc( dsc )
        except ValueError as ve:
            log.printo( "No sources for Package " + pkg.name + "-" + pkg.installed_version )
        except FetchError as fe:
            log.printo( "Source for Package " + pkg.name + "-" + pkg.installed_version + " could not be downloaded" )

    pkglist = get_initvm_pkglist()
    cache = Cache ()
    cache.open ()

    for pkg in pkglist:
        try:
            p = cache[pkg.name]
            if pkg.name == 'elbe-bootstrap':
                pkgver = p.versions [0]
            else:
                pkgver = p.installed

            dsc = pkgver.fetch_source ('/var/cache/elbe/sources',
                             ElbeAcquireProgress (cb=None), unpack=False)
            repo.includedsc( dsc )
        except ValueError as ve:
            log.printo( "No sources for Package " + pkg.name + "-" + str(pkg.installed_version) )
        except FetchError as fe:
            log.printo( "Source for Package " + pkg.name + "-" + pkgver.version + " could not be downloaded" )

    return repo.buildiso( os.path.join( target, "src-cdrom.iso" ) )
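
A hedged sketch of how this helper might be invoked; `rfs` (the target root filesystem) and `log` are assumed to come from the surrounding elbe build code, and the codenames and target path are placeholder values:

iso = mk_source_cdrom(rfs, 'amd64', 'buster', 'buster', '/tmp/build', log)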
Example #5
    def __init__(self, inifile):
        """
            :param inifile:
                Path to the packages.ini ini file
        """
        QtCore.QAbstractTableModel.__init__(self, None)
        self.packages = []
        self.to_install = []
        self.inifile = inifile
        self.cache = Cache()

        self._populate()
Example #6
    def __init__( self, rfs, logpath, arch, notifier=None, norecommend = False, noauth = True ):
        self.log = ASCIIDocLog(logpath)
        self.notifier = notifier
        InChRootObject.__init__(self, rfs)
        config.set ("APT::Architecture", arch)
        if norecommend:
            config.set ("APT::Install-Recommends", "0")
        else:
            config.set ("APT::Install-Recommends", "1")

        if noauth:
            config.set ("APT::Get::AllowUnauthenticated", "1")
        else:
            config.set ("APT::Get::AllowUnauthenticated", "0")

        self.cache = Cache()
        self.cache.open()
Example #7
def run_command(argv):

    # TODO - Set threshold and remove pylint directives
    #
    # We might want to make the threshold higher for certain
    # files/directories or just globally.

    # pylint: disable=too-many-locals
    # pylint: disable=too-many-branches
    # pylint: disable=too-many-statements

    oparser = OptionParser(
        usage="usage: %prog fetch_initvm_pkgs [options] <xmlfile>")

    oparser.add_option("-b",
                       "--binrepo",
                       dest="binrepo",
                       default="/var/cache/elbe/initvm-bin-repo",
                       help="directory where the bin repo should reside")

    oparser.add_option("-s",
                       "--srcrepo",
                       dest="srcrepo",
                       default="/var/cache/elbe/initvm-src-repo",
                       help="directory where the src repo should reside")

    oparser.add_option("--skip-validation",
                       action="store_true",
                       dest="skip_validation",
                       default=False,
                       help="Skip xml schema validation")

    oparser.add_option("--cdrom-mount-path",
                       dest="cdrom_path",
                       help="path where cdrom is mounted")

    oparser.add_option("--cdrom-device",
                       dest="cdrom_device",
                       help="cdrom device, in case it has to be mounted")

    oparser.add_option("--apt-archive",
                       dest="archive",
                       default="/var/cache/elbe/binaries/main",
                       help="path where binary packages are downloaded to.")

    oparser.add_option("--src-archive",
                       dest="srcarchive",
                       default="/var/cache/elbe/sources",
                       help="path where src packages are downloaded to.")

    oparser.add_option("--skip-build-sources",
                       action="store_false",
                       dest="build_sources",
                       default=True,
                       help="Skip downloading Source Packages")

    oparser.add_option("--skip-build-bin",
                       action="store_false",
                       dest="build_bin",
                       default=True,
                       help="Skip downloading binary packages")

    (opt, args) = oparser.parse_args(argv)

    if len(args) != 1:
        print("wrong number of arguments")
        oparser.print_help()
        sys.exit(20)

    try:
        xml = ElbeXML(args[0], skip_validate=opt.skip_validation)
    except ValidationError as e:
        print(str(e))
        print("xml validation failed. Bailing out")
        sys.exit(20)

    with elbe_logging({"streams": sys.stdout}):

        if opt.cdrom_path:
            if opt.cdrom_device:
                do('mount "%s" "%s"' % (opt.cdrom_device, opt.cdrom_path))

            # a cdrom build is identified by the cdrom option;
            # the xml file that is copied into the initvm
            # by the initrd does not have the cdrom tags set up.
            mirror = "file://%s" % opt.cdrom_path
        else:
            mirror = xml.get_initvm_primary_mirror(opt.cdrom_path)

        init_codename = xml.get_initvm_codename()

        # Binary Repo
        #
        repo = CdromInitRepo(init_codename, opt.binrepo, mirror)

        hostfs.mkdir_p(opt.archive)

        if opt.build_bin:
            pkglist = get_initvm_pkglist()
            cache = Cache()
            cache.open()
            for pkg in pkglist:
                pkg_id = "%s-%s" % (pkg.name, pkg.installed_version)
                try:
                    p = cache[pkg.name]
                    pkgver = p.installed
                    deb = fetch_binary(pkgver, opt.archive,
                                       ElbeAcquireProgress(cb=None))
                    repo.includedeb(deb, 'main', prio=pkgver.priority)
                except ValueError:
                    logging.exception('No package "%s"', pkg_id)
                except FetchError:
                    logging.exception(
                        'Package "%s-%s" could not be downloaded', pkg.name,
                        pkgver.version)
                except TypeError:
                    logging.exception('Package "%s" missing name or version',
                                      pkg_id)

        repo.finalize()

        # Source Repo
        #
        repo = CdromSrcRepo(init_codename, init_codename, opt.srcrepo, 0,
                            mirror)
        hostfs.mkdir_p(opt.srcarchive)

        # a cdrom build does not have sources,
        # so skip adding packages to the source repo
        #
        # FIXME: we need a way to add source cdroms later on
        if opt.cdrom_path:
            opt.build_sources = False

        if opt.build_sources:
            for pkg in pkglist:
                pkg_id = "%s-%s" % (pkg.name, pkg.installed_version)
                try:
                    p = cache[pkg.name]
                    pkgver = p.installed
                    dsc = pkgver.fetch_source(opt.srcarchive,
                                              ElbeAcquireProgress(cb=None),
                                              unpack=False)
                    repo.include_init_dsc(dsc, 'initvm')
                except ValueError:
                    logging.exception('No package "%s"', pkg_id)
                except FetchError:
                    logging.exception(
                        'Package "%s-%s" could not be downloaded', pkg.name,
                        pkgver.version)
                except TypeError:
                    logging.exception('Package "%s" missing name or version',
                                      pkg_id)

        repo.finalize()

        if opt.cdrom_device:
            do('umount "%s"' % opt.cdrom_device)
Example #8
class RPCAPTCache(InChRootObject):
    # pylint: disable=too-many-public-methods
    def __init__(self,
                 rfs,
                 log,
                 arch,
                 notifier=None,
                 norecommend=False,
                 noauth=True):

        # pylint: disable=too-many-arguments

        sys.stdout = open(log, 'a', buffering=0)
        sys.stderr = open(log, 'a', buffering=0)
        self.logfile = open(log, 'a', buffering=0)

        InChRootObject.__init__(self, rfs)

        self.notifier = notifier
        config.set("APT::Architecture", arch)
        if norecommend:
            config.set("APT::Install-Recommends", "0")
        else:
            config.set("APT::Install-Recommends", "1")

        if noauth:
            config.set("APT::Get::AllowUnauthenticated", "1")
        else:
            config.set("APT::Get::AllowUnauthenticated", "0")

        self.cache = Cache(progress=ElbeOpProgress())
        self.cache.open(progress=ElbeOpProgress())

    def dbg_dump(self, filename):
        ts = time.localtime()
        filename = filename + ('_%02d%02d%02d' %
                               (ts.tm_hour, ts.tm_min, ts.tm_sec))
        with open(filename, 'w') as dbg:
            for p in self.cache:
                dbg.write(
                    '%s %s %d %d %d %d %d %d %d %d %d %d %d %d\n' %
                    (p.name, p.candidate.version, p.marked_keep,
                     p.marked_delete, p.marked_upgrade, p.marked_downgrade,
                     p.marked_install, p.marked_reinstall, p.is_auto_installed,
                     p.is_installed, p.is_auto_removable, p.is_now_broken,
                     p.is_inst_broken, p.is_upgradable))

    def get_sections(self):
        ret = list(set([p.section for p in self.cache]))
        ret.sort()
        return ret

    def get_pkglist(self, section):
        if section == 'all':
            ret = [APTPackage(p) for p in self.cache]
        else:
            ret = [APTPackage(p) for p in self.cache if p.section == section]

        return ret

    def mark_install(self, pkgname, version, from_user=True, nodeps=False):
        p = self.cache[pkgname]
        if version:
            p.candidate = p.versions[version]
        p.mark_install(auto_fix=not nodeps,
                       auto_inst=not nodeps,
                       from_user=from_user)

    def mark_install_devpkgs(self, ignore_pkgs, ignore_dev_pkgs):
        ignore_pkgs.discard('libc6')  # we don't want to ignore libc
        ignore_pkgs.discard('libstdc++5')
        ignore_pkgs.discard('libstdc++6')
        # list all debian src packages of all installed packages that don't
        # come from debootstrap
        src_list = [
            p.candidate.source_name for p in self.cache
            if (p.is_installed and p.name not in ignore_pkgs)
        ]
        # go through all packages, remember package if its source package
        # matches one of the installed packages and the binary package is a
        # '-dev' package
        dev_list = [
            s for s in self.cache if (s.candidate.source_name in src_list and (
                s.name.endswith('-dev')))
        ]
        for p in dev_list:
            if p.name not in ignore_dev_pkgs:
                p.mark_install()
        # ensure that the symlinks package will be installed (it's needed for
        # fixing links inside the sysroot)
        self.cache['symlinks'].mark_install()

        for p in ignore_dev_pkgs:
            self.cache[p].mark_delete()

        dbgsym_list = [
            s.name + '-dbgsym' for s in self.cache
            if (s.is_installed or s.marked_install)
        ]

        for p in dbgsym_list:
            if p in self.cache:
                self.cache[p].mark_install()

    def cleanup(self, exclude_pkgs):
        for p in self.cache:
            if p.is_installed and not \
               p.is_auto_installed or \
               p.is_auto_removable:
                remove = True
                for x in exclude_pkgs:
                    if x == p.name:
                        remove = False
                if remove:
                    p.mark_delete(auto_fix=True, purge=True)

    def mark_upgrade(self, pkgname, version):
        p = self.cache[pkgname]
        if version:
            p.candidate = p.versions[version]
        p.mark_upgrade()

    def mark_delete(self, pkgname):
        p = self.cache[pkgname]
        p.mark_delete(purge=True)

    def mark_keep(self, pkgname, version):
        p = self.cache[pkgname]
        p.mark_keep()

    def update(self):
        self.cache.update(fetch_progress=ElbeAcquireProgress())
        self.cache.open(progress=ElbeOpProgress())

    def commit(self):
        os.environ["DEBIAN_FRONTEND"] = "noninteractive"
        os.environ["DEBONF_NONINTERACTIVE_SEEN"] = "true"
        self.cache.commit(ElbeAcquireProgress(),
                          ElbeInstallProgress(fileno=self.logfile.fileno()))
        self.cache.open(progress=ElbeOpProgress())

    def clear(self):
        self.cache.clear()

    def get_dependencies(self, pkgname):
        deps = getalldeps(self.cache, pkgname)
        return [APTPackage(p, cache=self.cache) for p in deps]

    def get_installed_pkgs(self, section='all'):
        # avoid DeprecationWarning: MD5Hash is deprecated, use Hashes instead
        # triggered by python-apt
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=DeprecationWarning)
            if section == 'all':
                pl = [APTPackage(p) for p in self.cache if p.is_installed]
            else:
                pl = [
                    APTPackage(p) for p in self.cache
                    if (p.section == section and p.is_installed)
                ]
            return pl

    def get_fileindex(self):
        index = {}

        for p in self.cache:
            if p.is_installed:
                for f in p.installed_files:
                    index[f] = p.name

        return index

    def get_marked_install(self, section='all'):
        if section == 'all':
            ret = [APTPackage(p) for p in self.cache if p.marked_install]
        else:
            ret = [
                APTPackage(p) for p in self.cache
                if (p.section == section and p.marked_install)
            ]
        return ret

    def get_upgradeable(self, section='all'):
        if section == 'all':
            ret = [APTPackage(p) for p in self.cache if p.is_upgradable]
        else:
            ret = [
                APTPackage(p) for p in self.cache
                if (p.section == section and p.is_upgradable)
            ]
        return ret

    def upgrade(self, dist_upgrade=False):
        self.cache.upgrade(dist_upgrade)

    def get_changes(self):
        changes = self.cache.get_changes()
        return [APTPackage(p) for p in changes]

    def has_pkg(self, pkgname):
        return pkgname in self.cache

    def is_installed(self, pkgname):
        if pkgname not in self.cache:
            return False
        return self.cache[pkgname].is_installed

    def get_pkg(self, pkgname):
        return APTPackage(self.cache[pkgname])

    def get_pkgs(self, pkgname):
        return [
            APTPackage(self.cache[p]) for p in sorted(self.cache.keys())
            if pkgname in p.lower()
        ]

    def compare_versions(self, ver1, ver2):
        return version_compare(ver1, ver2)

    def download_binary(self, pkgname, path, version=None):
        p = self.cache[pkgname]
        if version is None:
            pkgver = p.installed
        else:
            pkgver = p.versions[version]
        # avoid DeprecationWarning:
        # "MD5Hash is deprecated, use Hashes instead"
        # triggered by python-apt
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=DeprecationWarning)
            rel_filename = pkgver.fetch_binary(path, ElbeAcquireProgress())
            return self.rfs.fname(rel_filename)

    def download_source(self, pkgname, path, version=None):
        p = self.cache[pkgname]
        if version is None:
            pkgver = p.installed
        else:
            pkgver = p.versions[version]

        # avoid DeprecationWarning:
        # "MD5Hash is deprecated, use Hashes instead"
        # triggered by python-apt
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=DeprecationWarning)
            rel_filename = pkgver.fetch_source(path,
                                               ElbeAcquireProgress(),
                                               unpack=False)
            return self.rfs.fname(rel_filename)
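
A minimal sketch of driving this class, assuming a prepared elbe chroot filesystem object `rfs`; every method call below appears in the class above, and the log path is a placeholder:

cache = RPCAPTCache(rfs, '/var/log/elbe-apt.log', 'amd64')
cache.update()                    # refresh the package lists
cache.mark_install('vim', None)   # version=None keeps the default candidate
cache.commit()                    # fetch and install the marked packages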
Example #9
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_package_selection_args(parser)
    add_argument_repos_file_urls(parser, required=True)
    add_argument_skip_rosdep_keys(parser)
    add_argument_test_branch(parser)
    parser.add_argument('--workspace-root',
                        nargs='+',
                        help='The root path of the workspace to compile')
    args = parser.parse_args(argv)

    debian_pkg_names = [
        'git',
        'python3-apt',
        'python3-colcon-package-information',
        'python3-colcon-package-selection',
        'python3-colcon-recursive-crawl',
        'python3-colcon-ros',
        'python3-rosdep',
        'python3-vcstool',
    ]

    # get versions for build dependencies
    apt_cache = Cache()
    debian_pkg_versions = get_binary_package_versions(apt_cache,
                                                      debian_pkg_names)

    # generate Dockerfile
    data = {
        'os_name':
        args.os_name,
        'os_code_name':
        args.os_code_name,
        'arch':
        args.arch,
        'distribution_repository_urls':
        args.distribution_repository_urls,
        'distribution_repository_keys':
        get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'rosdistro_name':
        args.rosdistro_name,
        'custom_rosdep_urls': [],
        'uid':
        get_user_id(),
        'build_environment_variables':
        args.env_vars,
        'dependencies':
        debian_pkg_names,
        'dependency_versions':
        debian_pkg_versions,
        'repos_file_urls':
        args.repos_file_urls,
        'test_branch':
        args.test_branch,
        'skip_rosdep_keys':
        args.skip_rosdep_keys,
        'package_selection_args':
        args.package_selection_args,
        'workspace_root':
        args.workspace_root,
    }
    create_dockerfile('ci/create_workspace.Dockerfile.em', data,
                      args.dockerfile_dir)
Example #10
 def __init__(self):
     if not self.packages_required:
         return
     self.os = (self.detect_os()).strip()
     if self.os == 'debian':
         self._package_cache = Cache()
Example #11
class ZfsRequires (object):
    """
    Class manages installing additional packages

    TODO:
    - yum package handling
    - dnf package handling
    """

    packages_required = ["zfsutils-linux", "debootstrap", "gdisk", "zfs-initramfs"]
  
    def __init__(self):
        if not self.packages_required:
            return
        self.os = (self.detect_os()).strip()
        if self.os == 'debian':
            self._package_cache = Cache()

    # [ EXECUTOR ]
    def load_runner(self, cmd):
        """
        Executes given command and returns errors if any
        :cmd: [list]
        :ret: [list]
        """
        cmd = ' '.join(cmd)
        cmd_out = Popen(cmd, shell=True, bufsize=1, stdout=PIPE, stderr=PIPE)

        ret = []
        for pipe in cmd_out.communicate():
            ret.append(pipe.decode('utf-8'))
        return ret


    def detect_os(self):
        """
        Detects OS family for correct package installation
        :returns: str
        """
        return self.load_runner(['cat /etc/os-release', '| grep ID_LIKE', '| awk -F \'=\' \'{print $2}\''])[0]

    def apt_update(self):
        """
        Updates the apt package cache
        :msg: Success or False if not needed
        """

        print(" [ Checking that requied packages are present ]")
        msg = []
        for this_package in self.packages_required:

            if not self._package_cache[this_package].is_installed:
                self._package_cache.update()
                msg.append('Package cache updated')
                msg.append(self.apt_install())
                break
            else:
                msg.append('Package '+this_package+' present')

        return msg

    def apt_install(self):
        """
        Installs packages from packages_required
        """
        res = []
        for this_package in self.packages_required:

            if not self._package_cache[this_package].is_installed:
                self._package_cache[this_package].mark_install()
                res.append(' '.join(['Package', this_package, 'marked for install.\n']))

        self._package_cache.commit()
        return ''.join(res)
Example #12
 def postinit(self, path):
     """Checks to be run before real check or to create pre-calculated data for several runs. Only called once!"""
     try:
         self.apt = Cache(memonly=True)
     except Exception as ex:
         self.debug('failed to load APT cache: %s' % (ex,))
Example #13
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the CI job")

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)

    add_argument_build_tool(parser, required=True)
    a1 = add_argument_build_tool_args(parser)
    a2 = add_argument_build_tool_test_args(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    add_argument_install_packages(parser)
    add_argument_ros_version(parser)
    add_argument_run_abichecker(parser)
    add_argument_require_gpu_support(parser)
    add_argument_testing(parser)
    parser.add_argument('--workspace-root',
                        nargs='+',
                        help='The root path of the workspace to compile')

    remainder_args = extract_multiple_remainders(argv, (a1, a2))
    args = parser.parse_args(argv)
    for k, v in remainder_args.items():
        setattr(args, k, v)

    apt_cache = Cache()

    debian_pkg_names = set(['build-essential'])
    debian_pkg_names.update(args.install_packages)
    if args.build_tool == 'colcon':
        debian_pkg_names.update([
            'python3-catkin-pkg-modules',
            'python3-colcon-metadata',
            'python3-colcon-output',
            'python3-colcon-package-selection',
            'python3-colcon-parallel-executor',
            'python3-colcon-ros',
            'python3-colcon-test-result',
            'python3-rosdistro-modules',
        ])

    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print('  -', debian_pkg_name)

    install_list = 'install_list.txt'
    write_install_list(os.path.join(args.dockerfile_dir, install_list),
                       debian_pkg_names, apt_cache)
    install_lists = [install_list, 'install_list_build.txt']
    if args.testing:
        install_lists.append('install_list_test.txt')

    mapped_workspaces = [
        (workspace_root, '/tmp/ws%s' % (index if index > 1 else ''))
        for index, workspace_root in enumerate(args.workspace_root, 1)
    ]

    # generate Dockerfile
    data = {
        'os_name':
        args.os_name,
        'os_code_name':
        args.os_code_name,
        'arch':
        args.arch,
        'distribution_repository_urls':
        args.distribution_repository_urls,
        'distribution_repository_keys':
        get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'rosdistro_name':
        args.rosdistro_name,
        'uid':
        get_user_id(),
        'build_tool':
        args.build_tool,
        'build_tool_args':
        args.build_tool_args,
        'build_tool_test_args':
        args.build_tool_test_args,
        'ros_version':
        args.ros_version,
        'build_environment_variables':
        ['%s=%s' % key_value for key_value in args.env_vars.items()],
        'install_lists':
        install_lists,
        'dependencies': [],
        'dependency_versions': [],
        'testing':
        args.testing,
        'run_abichecker':
        args.run_abichecker,
        'require_gpu_support':
        args.require_gpu_support,
        'workspace_root':
        mapped_workspaces[-1][1],
        'parent_result_space':
        [mapping[1] for mapping in mapped_workspaces[:-1]],
    }
    create_dockerfile('devel/devel_task.Dockerfile.em', data,
                      args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    for mapping in mapped_workspaces:
        print('  -v %s:%s' % mapping)
Example #14
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the devel job")
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced')
    parser.add_argument('--workspace-root',
                        nargs='+',
                        help='The root path of the workspace to compile')
    parser.add_argument('--os-name',
                        required=True,
                        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'xenial')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_build_tool(parser, required=True)
    add_argument_ros_version(parser)
    add_argument_env_vars(parser)
    add_argument_dockerfile_dir(parser)
    parser.add_argument(
        '--testing',
        action='store_true',
        help='The flag if the workspace should be built with tests enabled '
        'and instead of installing the tests are run')
    args = parser.parse_args(argv)

    # get direct build dependencies
    pkgs = {}
    for workspace_root in args.workspace_root:
        source_space = os.path.join(workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs.update(find_packages(source_space))

    pkg_names = [pkg.name for pkg in pkgs.values()]
    print("Found the following packages:")
    for pkg_name in sorted(pkg_names):
        print('  -', pkg_name)

    maintainer_emails = set([])
    for pkg in pkgs.values():
        for m in pkg.maintainers:
            maintainer_emails.add(m.email)
    if maintainer_emails:
        print('Package maintainer emails: %s' %
              ' '.join(sorted(maintainer_emails)))

    context = initialize_resolver(args.rosdistro_name, args.os_name,
                                  args.os_code_name)

    apt_cache = Cache()

    debian_pkg_names = [
        'build-essential',
        'python3',
    ]
    if args.build_tool == 'colcon':
        debian_pkg_names += [
            'python3-colcon-ros',
            'python3-colcon-test-result',
        ]
    elif 'catkin' not in pkg_names:
        debian_pkg_names += resolve_names(['catkin'], **context)
    print('Always install the following generic dependencies:')
    for debian_pkg_name in sorted(debian_pkg_names):
        print('  -', debian_pkg_name)

    debian_pkg_versions = {}

    # get build dependencies and map them to binary packages
    build_depends = get_dependencies(
        pkgs.values(), 'build', _get_build_and_recursive_run_dependencies)
    debian_pkg_names_building = resolve_names(build_depends, **context)
    debian_pkg_names_building -= set(debian_pkg_names)
    debian_pkg_names += order_dependencies(debian_pkg_names_building)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names))

    # get run and test dependencies and map them to binary packages
    run_and_test_depends = get_dependencies(pkgs.values(), 'run and test',
                                            _get_run_and_test_dependencies)
    debian_pkg_names_testing = resolve_names(run_and_test_depends, **context)
    # all additional run/test dependencies
    # are added after the build dependencies
    # in order to reuse existing images in the docker container
    debian_pkg_names_testing -= set(debian_pkg_names)
    debian_pkg_versions.update(
        get_binary_package_versions(apt_cache, debian_pkg_names_testing))
    if args.testing:
        debian_pkg_names += order_dependencies(debian_pkg_names_testing)

    # generate Dockerfile
    data = {
        'os_name':
        args.os_name,
        'os_code_name':
        args.os_code_name,
        'arch':
        args.arch,
        'distribution_repository_urls':
        args.distribution_repository_urls,
        'distribution_repository_keys':
        get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'rosdistro_name':
        args.rosdistro_name,
        'uid':
        get_user_id(),
        'build_tool':
        args.build_tool,
        'ros_version':
        args.ros_version,
        'build_environment_variables':
        args.env_vars,
        'dependencies':
        debian_pkg_names,
        'dependency_versions':
        debian_pkg_versions,
        'testing':
        args.testing,
        'prerelease_overlay':
        len(args.workspace_root) > 1,
    }
    create_dockerfile('devel/devel_task.Dockerfile.em', data,
                      args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/ws' % args.workspace_root[-1])
Example #15
def mk_binary_cdrom(rfs,
                    arch,
                    codename,
                    init_codename,
                    xml,
                    target,
                    log,
                    cdrom_size=CDROM_SIZE):

    rfs.mkdir_p('/var/cache/elbe/binaries/added')
    rfs.mkdir_p('/var/cache/elbe/binaries/main')
    hostfs.mkdir_p('/var/cache/elbe/binaries/main')

    if xml is not None:
        mirror = xml.get_primary_mirror(rfs.fname("cdrom"))
    else:
        mirror = 'http://ftp.debian.org/debian'

    repo_path = os.path.join(target, "binrepo")

    repo = CdromBinRepo(arch, codename, init_codename, repo_path, log,
                        cdrom_size, mirror)

    if xml is not None:
        pkglist = get_initvm_pkglist()
        cache = Cache()
        cache.open()
        for pkg in pkglist:
            try:
                p = cache[pkg.name]
                if pkg.name == 'elbe-bootstrap':
                    pkgver = p.versions[0]
                else:
                    pkgver = p.installed
                deb = pkgver.fetch_binary('/var/cache/elbe/binaries/main',
                                          ElbeAcquireProgress(cb=None))
                repo.include_init_deb(deb, 'main')
            except ValueError as ve:
                log.printo("No Package " + pkg.name + "-" +
                           str(pkg.installed_version))
            except FetchError as fe:
                log.printo("Package " + pkg.name + "-" + pkgver.version +
                           " could not be downloaded")
            except TypeError as te:
                log.printo("Package " + pkg.name + "-" +
                           str(pkg.installed_version) +
                           " missing name or version")

        cache = get_rpcaptcache(rfs, "aptcache.log", arch)
        for p in xml.node("debootstrappkgs"):
            pkg = XMLPackage(p, arch)
            try:
                deb = cache.download_binary(pkg.name,
                                            '/var/cache/elbe/binaries/main',
                                            pkg.installed_version)
                repo.includedeb(deb, 'main')
            except ValueError as ve:
                log.printo("No Package " + pkg.name + "-" +
                           pkg.installed_version)
            except FetchError as fe:
                log.printo("Package " + pkg.name + "-" +
                           pkg.installed_version + " could not be downloaded")
            except TypeError as te:
                log.printo("Package " + pkg.name + "-" +
                           pkg.installed_version + " missing name or version")

    cache = get_rpcaptcache(rfs, "aptcache.log", arch)
    pkglist = cache.get_installed_pkgs()
    for pkg in pkglist:
        try:
            deb = cache.download_binary(pkg.name,
                                        '/var/cache/elbe/binaries/added',
                                        pkg.installed_version)
            repo.includedeb(deb, 'added')
        except KeyError as ke:
            log.printo(str(ke))
        except ValueError as ve:
            log.printo("No Package " + pkg.name + "-" + pkg.installed_version)
        except FetchError as fe:
            log.printo("Package " + pkg.name + "-" +
                       str(pkg.installed_version) + " could not be downloaded")
        except TypeError as te:
            log.printo("Package " + pkg.name + "-" + pkg.installed_version +
                       " missing name or version")

    # Mark the binary repo with the necessary files
    # to make the installer accept this as a CD-ROM
    repo_fs = Filesystem(repo_path)
    repo_fs.mkdir_p(".disk")
    repo_fs.write_file(".disk/base_installable", 0o644, "main\n")
    repo_fs.write_file(".disk/base_components", 0o644, "main\n")
    repo_fs.write_file(".disk/cd_type", 0o644, "not_complete\n")
    repo_fs.write_file(".disk/info", 0o644, "elbe inst cdrom - full cd\n")
    repo_fs.symlink(".", "debian", allow_exists=True)
    repo_fs.write_file("md5sum.txt", 0o644, "")

    # write source xml onto cdrom
    xml.xml.write(repo_fs.fname('source.xml'))

    return repo.buildiso(os.path.join(target, "bin-cdrom.iso"))
Example #16
#!/usr/bin/python

from apt import Cache
import argparse
from collections import namedtuple
import re
import subprocess
import sys

APT_CACHE = Cache()
DEPENDS = ('Depends', 'PreDepends', 'Recommends')
DEPENDENCY_RE = re.compile(
    r' +(?P<type>[^:]+): (?P<pkg_name>\S+)(?: (?P<version>.+))')
DOTTY_STYLE = {
    'Reverse Recommends': '[style=dotted color="#999999"]',
}

PackageDependency = namedtuple('PackageDependency',
                               ('name', 'type', 'version'))
TreeNode = namedtuple('TreeNode', ('name', 'children'))


class Error(Exception):
    """Error occurred."""


class DependencyTree(object):
    def __init__(self, package):
        self.package_ = package
        self.dependencies_ = None
        self.tree_ = None
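
A small check of what DEPENDENCY_RE captures, using a hypothetical input line in the indented "Type: package version" shape the pattern expects:

m = DEPENDENCY_RE.match('  Depends: libc6 (>= 2.14)')  # hypothetical input line
if m:
    # prints: Depends libc6 (>= 2.14)
    print(m.group('type'), m.group('pkg_name'), m.group('version'))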
Example #17
class QPackagesModel(QtCore.QAbstractTableModel):
    """
        Model used to handle the packages listed in a .ini file
    """
    def __init__(self, inifile):
        """
            :param inifile:
                Path to the packages.ini ini file
        """
        QtCore.QAbstractTableModel.__init__(self, None)
        self.packages = []
        self.to_install = []
        self.inifile = inifile
        self.cache = Cache()

        self._populate()

    def _populate(self):
        """
            Load the inifile with the package names and description
        """
        pkg_dict = DictIni(self.inifile)
        for section, packages in pkg_dict.items():
            for pkg_name, pkg_description in packages.iteritems():
                if self.cache.has_key(pkg_name):
                    pkg = self.cache[pkg_name]
                    if pkg.is_installed:
                        status = 2
                    else:
                        status = 0
                else:
                    pkg = None
                    status = 1
                    print("This package  : '%s:%s' isn't available in your \
configured repositories" % (pkg_name, pkg_description))
                self.packages.append(
                        {'name':pkg_name,
                         'description':pkg_description,
                         'package':pkg,
                         'status':status,
                         'section':section})

        self.packages = sorted(self.packages, key=lambda a:a['status'])
        self.reset()

    def _get_pkg(self, index):
        """
            Returns a pkg object for a given index
        """
        return self.packages[index.row()]['package']

    def _init_cache(self):
        """
            Initialize our cache for apt
        """
        self.cache = Cache()

    def rowCount(self, index=None):
        """
            Required function
            Returns the number of rows
        """
        return len(self.packages)

    def columnCount(self, index=None):
        """
            Required function
            Returns the number of columns
        """
        return len(TABLE_HEADER)

    def data(self, index, role):
        """
            Required function
            Returns the model's data
        """
        if not index.isValid() or not (0 <= index.row() < self.rowCount()):
            return QtCore.QVariant()

        column = index.column()
        package = self.packages[index.row()]

        if role == QtCore.Qt.DisplayRole:
            return self.render_cell(package, column)
        elif role == QtCore.Qt.CheckStateRole:
            return self._cell_check_status(package['package'], column)
        elif role == QtCore.Qt.BackgroundColorRole:
            return self._cell_color(package['status'])


    @staticmethod
    def render_cell(package, column):
        """
            Return the column's cell content for the given package
        """
        if column == 0:
            return package['name']
        elif column == 1:
            return package['description'].decode('utf-8')
        elif column == 2:
            if not package['package']:
                return "Not Available"

    def _cell_check_status(self, pkg, column):
        """
            Returns the row's Qt checked status
        """
        if column == 2:
            if pkg and (pkg.installed or pkg.marked_install):
                return QtCore.QVariant(QtCore.Qt.Checked)
            else:
                return QtCore.QVariant(QtCore.Qt.Unchecked)

    @staticmethod
    def _cell_color(status):
        """
            Returns the cell color
        """
        if status == 2:
            return QtGui.QColor(255, 0, 0, 127)
        elif status == 1:
            return QtGui.QColor(255, 255, 0, 127)
        elif status == 0:
            return QtGui.QColor(255, 255, 255, 127)
        else:
            return QtGui.QColor(255, 127, 0, 200)

    def setData(self, index, value, role):
        """
            Changes the model's data
        """
        if role == QtCore.Qt.CheckStateRole and index.column() == 2:
            ans = False
            pkg = self._get_pkg(index)
            if not pkg.installed:
                if value == QtCore.QVariant(QtCore.Qt.Checked):
                    pkg.mark_install()
                    self.packages[index.row()]['status'] = -1
                    self.to_install.append(True)
                    self.emit(QtCore.SIGNAL("dataChanged(int)"),
                              len(self.to_install))
                else:
                    pkg.mark_delete()
                    self.packages[index.row()]['status'] = 0
                    self.to_install.pop()
                    self.emit(QtCore.SIGNAL("dataChanged(int)"),
                              len(self.to_install))
                ans = True
        else:
            ans = QtCore.QAbstractTableModel.setData(self, index, value, role)
        return ans

    def flags(self, index):
        """
            Add a flag to indicate whether a field is editable/checkable ...
        """
        if not index.isValid():
            return QtCore.Qt.ItemIsEnabled

        pkg = self._get_pkg(index)
        ans = QtCore.QAbstractTableModel.flags(self, index)
        if pkg and not pkg.installed:
            if index.column() == 2:
                ans |= QtCore.Qt.ItemIsUserCheckable
                ans |= QtCore.Qt.ItemIsEditable
                ans |= QtCore.Qt.ItemIsSelectable
        else:
            ans &= ~QtCore.Qt.ItemIsEnabled
        return ans

    def headerData(self, section, orientation, role):
        """
            Native optional function
            Returns the table's header info
            :orientation:
                QtCore.Qt:Orientation
            :role:
                QtCore.Qt:Role
        """
        # Alignment
        if role == QtCore.Qt.TextAlignmentRole:
            if orientation == QtCore.Qt.Horizontal:
                return QtCore.QVariant(int(QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter))
            return QtCore.QVariant(int(QtCore.Qt.AlignRight|QtCore.Qt.AlignVCenter))

        # Headers
        if role != QtCore.Qt.DisplayRole:
            return QtCore.QVariant()
        if orientation == QtCore.Qt.Horizontal:
            ret_val = TABLE_HEADER[section]
            return QtCore.QVariant(ret_val)
        else:
            return QtCore.QVariant(int(section + 1))

    #Cache update and file
    def update(self):
        """
            Update our apt cache and our packages list
        """
        acquire = QAcquireProgress()
        self._init_cache()

        print("Connecting slots")
        for sign in (QtCore.SIGNAL('statusChanged'), QtCore.SIGNAL('statusStarted()'),
                                            QtCore.SIGNAL('statusFinished()'),):
            self.connect(acquire, sign, self._reemit(sign))
        self.connect(acquire, QtCore.SIGNAL('statusFinished()'), self._update_datas)
        self.cache.update(acquire)

    def _update_datas(self):
        """
            Refresh the table's data when an update has been called
        """
        self.cache = Cache()
        self.packages = []
        self.to_install = []
        self._populate()

    def install(self):
        """
            Install the packages that have been marked for installation
        """
        acquire = QAcquireProgress()
        # FIXME: there are still a few things that don't work right
        install = QInstallProgress()
        print("Connecting slots")
        for sign in (QtCore.SIGNAL('statusChanged'), QtCore.SIGNAL('statusStarted()'),
                                            QtCore.SIGNAL('statusFinished()'),):
            self.connect(acquire, sign, self._reemit(sign))
            self.connect(install, sign, self._reemit(sign))
        self.connect(install, QtCore.SIGNAL('statusFinished()'),
                     self._update_datas)
        self.cache.commit(acquire, install)

    def _reemit(self, signal):
        """
            Returns a _reemit func for the given signal
        """
        em = self.emit
        def emitter(*args, **kw):
            em(signal, *args, **kw)
        return emitter
Example #18
class RPCAPTCache(InChRootObject):
    def __init__( self, rfs, logpath, arch, notifier=None, norecommend = False, noauth = True ):
        self.log = ASCIIDocLog(logpath)
        self.notifier = notifier
        InChRootObject.__init__(self, rfs)
        config.set ("APT::Architecture", arch)
        if norecommend:
            config.set ("APT::Install-Recommends", "0")
        else:
            config.set ("APT::Install-Recommends", "1")

        if noauth:
            config.set ("APT::Get::AllowUnauthenticated", "1")
        else:
            config.set ("APT::Get::AllowUnauthenticated", "0")

        self.cache = Cache()
        self.cache.open()

    def dbg_dump( self, filename ):
        ts = time.localtime ()
        filename = filename + (
                '_%02d%02d%02d' % (ts.tm_hour, ts.tm_min, ts.tm_sec))
        with open (filename, 'w') as dbg:
            for p in self.cache:
                dbg.write ('%s %s %d %d %d %d %d %d %d %d %d %d %d %d\n' % (
                    p.name, p.candidate.version, p.marked_keep, p.marked_delete,
                    p.marked_upgrade, p.marked_downgrade, p.marked_install,
                    p.marked_reinstall, p.is_auto_installed, p.is_installed,
                    p.is_auto_removable, p.is_now_broken, p.is_inst_broken,
                    p.is_upgradable))

    def get_sections( self ):
        ret = list(set( [p.section for p in self.cache] ))
        ret.sort()
        return ret

    def get_pkglist( self, section ):
        if section == 'all':
            ret = [ APTPackage(p) for p in self.cache ]
        else:
            ret = [ APTPackage(p) for p in self.cache if p.section == section ]

        return ret

    def mark_install( self, pkgname, version, from_user=True, nodeps=False ):
        p = self.cache[pkgname]
        if version:
            p.candidate = p.versions[ version ]
        p.mark_install( auto_fix = not nodeps,
                auto_inst = not nodeps,
                from_user = from_user )

    def mark_install_devpkgs( self, ignore_pkgs ):
        ignore_pkgs.remove ('libc6') # we don't want to ignore libc
        # we don't want to ignore libstdc++
        try:
            ignore_pkgs.remove ('libstdc++5')
        except (KeyError, ValueError):
            pass
        try:
            ignore_pkgs.remove ('libstdc++6')
        except (KeyError, ValueError):
            pass
        # list all debian src packages of all installed packages that don't
        # come from debootstrap
        src_list = [p.candidate.source_name for p in self.cache if p.is_installed and p.name not in ignore_pkgs ]
        # go through all packages, remember package if its source package
        # matches one of the installed packages and the binary package is a
        # '-dev' package
        dev_list = [s for s in self.cache if (s.candidate.source_name in src_list and s.name.endswith ('-dev'))]
        for p in dev_list:
            p.mark_install ()
        # ensure that the symlinks package will be installed (it's needed for
        # fixing links inside the sysroot)
        self.cache ['symlinks'].mark_install ()

    def cleanup (self, exclude_pkgs):
        for p in self.cache:
            if (p.is_installed and not p.is_auto_installed) or p.is_auto_removable:
                remove = True
                for x in exclude_pkgs:
                    if x == p.name:
                        remove = False
                if remove:
                    p.mark_delete( auto_fix=True, purge=True )

    def mark_upgrade( self, pkgname, version ):
        p = self.cache[pkgname]
        if version:
            p.candidate = p.versions[ version ]
        p.mark_upgrade()

    def mark_delete( self, pkgname, version ):
        p = self.cache[pkgname]
        p.mark_delete( purge=True )

    def mark_keep( self, pkgname, version ):
        p = self.cache[pkgname]
        p.mark_keep()


    def update( self ):
        self.cache.update()
        self.cache.open()

    def co_cb(self, msg):
        if self.notifier:
            self.notifier.status (msg)

    def commit(self):
        os.environ["DEBIAN_FRONTEND"]="noninteractive"
        os.environ["DEBONF_NONINTERACTIVE_SEEN"]="true"
        self.cache.commit( ElbeAcquireProgress(cb=self.co_cb),
                           ElbeInstallProgress(cb=self.co_cb) )
        self.cache.open()

    def clear(self):
        self.cache.clear()

    def get_dependencies(self, pkgname):
        deps = getalldeps( self.cache, pkgname )
        return [APTPackage(p, cache=self.cache) for p in deps]

    def get_installed_pkgs( self, section='all' ):
        if section == 'all':
            return [APTPackage(p) for p in self.cache if p.is_installed]
        else:
            return [APTPackage(p) for p in self.cache if (p.section == section
                and p.is_installed)]

    def get_fileindex( self ):
        index = {}

        for p in self.cache:
            if p.is_installed:
                for f in p.installed_files:
                    index[f] = p.name

        return index

    def get_marked_install( self, section='all' ):
        if section == 'all':
            ret = [APTPackage(p) for p in self.cache if p.marked_install]
        else:
            ret = [APTPackage(p) for p in self.cache if (p.section == section
                and p.marked_install)]
        return ret

    def get_upgradeable(self, section='all'):
        if section == 'all':
            ret = [ APTPackage(p) for p in self.cache if p.is_upgradable]
        else:
            ret = [ APTPackage(p) for p in self.cache if (p.section == section
                and p.is_upgradable)]
        return ret

    def upgrade( self, dist_upgrade = False ):
        self.cache.upgrade( dist_upgrade )

    def get_changes( self ):
        changes = self.cache.get_changes()
        return [ APTPackage(p) for p in changes ]

    def has_pkg( self, pkgname ):
        return pkgname in self.cache

    def is_installed( self, pkgname ):
        if pkgname not in self.cache:
            return False
        return self.cache[pkgname].is_installed

    def get_pkg( self, pkgname ):
        return APTPackage( self.cache[pkgname] )

    def get_pkgs( self, pkgname ):
        return [APTPackage (self.cache[p]) for p in sorted (self.cache.keys()) if pkgname in p.lower()]

    def compare_versions( self, ver1, ver2 ):
        return version_compare( ver1, ver2 )

    def download_binary( self, pkgname, path, version=None ):
        p = self.cache[pkgname]
        if version is None:
            pkgver = p.installed
        else:
            pkgver = p.versions[version]

        rel_filename = pkgver.fetch_binary(path,
                ElbeAcquireProgress(cb=self.co_cb))
        return self.rfs.fname( rel_filename )

    def download_source( self, pkgname, path, version=None ):
        p = self.cache[pkgname]
        if version is None:
            pkgver = p.installed
        else:
            pkgver = p.versions[version]

        rel_filename = pkgver.fetch_source(path,
                ElbeAcquireProgress(cb=self.co_cb), unpack=False)
        return self.rfs.fname( rel_filename )
Example #19
File: cdroms.py Project: atoz-chevara/elbe
def mk_binary_cdrom(rfs, arch, codename, init_codename, xml, target, log, cdrom_size=CDROM_SIZE):

    rfs.mkdir_p( '/var/cache/elbe/binaries/added' )
    rfs.mkdir_p( '/var/cache/elbe/binaries/main' )
    hostfs.mkdir_p( '/var/cache/elbe/binaries/main' )

    if xml is not None:
        mirror = xml.get_primary_mirror (rfs.fname("cdrom"))
    else:
        mirror='http://ftp.debian.org/debian'

    repo_path = os.path.join (target, "binrepo")

    repo = CdromBinRepo (arch, codename, init_codename,
                         repo_path, log, cdrom_size, mirror)

    if xml is not None:
        pkglist = get_initvm_pkglist()
        cache = Cache ()
        cache.open ()
        for pkg in pkglist:
            try:
                p = cache [pkg.name]
                if pkg.name == 'elbe-bootstrap':
                    pkgver = p.versions [0]
                else:
                    pkgver = p.installed
                deb = pkgver.fetch_binary ('/var/cache/elbe/binaries/main',
                                            ElbeAcquireProgress (cb=None) )
                repo.include_init_deb(deb, 'main')
            except ValueError as ve:
                log.printo( "No Package " + pkg.name + "-" + str(pkg.installed_version) )
            except FetchError as fe:
                log.printo( "Package " + pkg.name + "-" + pkgver.version + " could not be downloaded" )
            except TypeError as te:
                log.printo( "Package " + pkg.name + "-" + str(pkg.installed_version) + " missing name or version" )

        cache = get_rpcaptcache( rfs, "aptcache.log", arch )
        for p in xml.node("debootstrappkgs"):
            pkg = XMLPackage(p, arch)
            try:
                deb = cache.download_binary( pkg.name,
                                             '/var/cache/elbe/binaries/main',
                                             pkg.installed_version )
                repo.includedeb(deb, 'main')
            except ValueError as ve:
                log.printo( "No Package " + pkg.name + "-" + pkg.installed_version )
            except FetchError as fe:
                log.printo( "Package " + pkg.name + "-" + pkg.installed_version + " could not be downloaded" )
            except TypeError as te:
                log.printo( "Package " + pkg.name + "-" + pkg.installed_version + " missing name or version" )

    cache = get_rpcaptcache( rfs, "aptcache.log", arch )
    pkglist = cache.get_installed_pkgs ()
    for pkg in pkglist:
        try:
            deb = cache.download_binary( pkg.name,
                                        '/var/cache/elbe/binaries/added',
                                        pkg.installed_version )
            repo.includedeb(deb, 'added')
        except KeyError as ke:
            log.printo( str (ke) )
        except ValueError as ve:
            log.printo( "No Package " + pkg.name + "-" + pkg.installed_version )
        except FetchError as fe:
            log.printo( "Package " + pkg.name + "-" + str (pkg.installed_version) + " could not be downloaded" )
        except TypeError as te:
            log.printo( "Package " + pkg.name + "-" + pkg.installed_version + " missing name or version" )

    # Mark the binary repo with the necessary files
    # to make the installer accept this as a CD-ROM
    repo_fs = Filesystem( repo_path )
    repo_fs.mkdir_p (".disk")
    repo_fs.write_file (".disk/base_installable", 0o644, "main\n")
    repo_fs.write_file (".disk/base_components", 0o644, "main\n")
    repo_fs.write_file (".disk/cd_type", 0o644, "not_complete\n")
    repo_fs.write_file (".disk/info", 0o644, "elbe inst cdrom - full cd\n")
    repo_fs.symlink (".", "debian", allow_exists=True)
    repo_fs.write_file ("md5sum.txt", 0o644, "")

    # write source xml onto cdrom
    xml.xml.write (repo_fs.fname ('source.xml'))

    return repo.buildiso( os.path.join( target, "bin-cdrom.iso" ) )
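
The `.disk` marker block above relies on elbe's Filesystem helper. A plain-Python equivalent, as a sketch:

import os

def mark_as_cdrom(repo_path):
    # the installer checks these marker files before accepting the medium
    disk = os.path.join(repo_path, '.disk')
    os.makedirs(disk, exist_ok=True)
    for name, content in [('base_installable', 'main\n'),
                          ('base_components', 'main\n'),
                          ('cd_type', 'not_complete\n'),
                          ('info', 'elbe inst cdrom - full cd\n')]:
        with open(os.path.join(disk, name), 'w') as f:
            f.write(content)
    # the 'debian' symlink points the installer back at the repo root
    debian_link = os.path.join(repo_path, 'debian')
    if not os.path.lexists(debian_link):
        os.symlink('.', debian_link)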
Example #20
0
# GLOBAL VAR
# Local path to the inbox of all tutorials in the current user's home directory
C_LOCAL_TUTO = "%s/.local/share/libtuto/" % getpwuid(getuid())[5]

C_HOST_TUTO = "tutorial/"

# INITIALIZATION

with DbMysql() as db, Fileftp() as ftp:

    if not path.exists(C_LOCAL_TUTO):
        makedirs(C_LOCAL_TUTO)

    # GET ALL SOFTWARE AND THEIR TUTORIALS
    cache = Cache()

    # SQL query to get all software and their versions handled by our community
    s_query = 'Select sv.idversion,st.name,sv.number from software_tuto st join soft_version sv on st.idsoft=sv.idsoft'
    l_result = db.executeReturnAll(s_query)

    for pkgQuery in l_result:
        i_id, s_tutosoft, s_tutovers = pkgQuery

        found = (s_tutosoft in cache
                 and cache[s_tutosoft].is_installed
                 and cache[s_tutosoft].installed.source_version == s_tutovers)

        if found:
Example #21
0
 def __init__(self, packages_sink):
     self.cache = Cache()
     self.missing: set = set()
     self.sink = packages_sink
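
A possible consumer method for this collector, sketched under the assumption that packages_sink is a callable and that lookups happen by package name (neither is shown in the original snippet):

 def feed(self, names):
     # push resolvable packages into the sink, remember unresolvable names
     for name in names:
         if name in self.cache:
             self.sink(self.cache[name])
         else:
             self.missing.add(name)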
Example #22
0
 def _init_cache(self):
     """
         Initialize our cache for apt
     """
     self.cache = Cache()
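
A companion refresh method (a sketch; the surrounding class is not shown) that re-reads the package lists, equivalent to running 'apt-get update' and re-opening the cache:

 def _refresh_cache(self):
     """
         Refresh our apt cache after the package lists changed
     """
     self.cache.update()
     self.cache.open()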
Example #23
0
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description='Lists available binary packages and versions which are '
        'needed to satisfy rosdep keys for ROS packages in the workspace')

    # Positional
    add_argument_rosdistro_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)

    add_argument_output_dir(parser)
    add_argument_package_selection_args(parser)
    add_argument_skip_rosdep_keys(parser)
    parser.add_argument('--package-root',
                        nargs='+',
                        help='The path to the directory containing packages')
    args = parser.parse_args(argv)

    workspace_root = args.package_root[-1]
    os.chdir(workspace_root)

    with Scope('SUBSECTION', 'mark packages with IGNORE files'):
        all_packages = locate_packages(workspace_root)
        selected_packages = all_packages
        if args.package_selection_args:
            print('Using package selection arguments:',
                  args.package_selection_args)
            selected_packages = locate_packages(
                workspace_root, extra_args=args.package_selection_args)

            to_ignore = all_packages.keys() - selected_packages.keys()
            print('Ignoring %d packages' % len(to_ignore))
            for package in sorted(to_ignore):
                print('-', package)
                package_root = all_packages[package]
                Path(package_root, 'COLCON_IGNORE').touch()

        print('There are %d packages which meet selection criteria' %
              len(selected_packages))

    with Scope('SUBSECTION', 'Enumerating packages needed to build'):
        # find all of the underlay packages
        underlay_pkgs = {}
        all_underlay_pkg_names = set()
        for package_root in args.package_root[0:-1]:
            print("Crawling for packages in '%s'" % package_root)
            underlay_pkgs.update(find_packages(package_root))

            # Check for a colcon index for non-ROS package detection
            colcon_index = os.path.join(package_root, 'colcon-core',
                                        'packages')
            try:
                all_underlay_pkg_names.update(os.listdir(colcon_index))
            except FileNotFoundError:
                pass

        underlay_pkg_names = [pkg.name for pkg in underlay_pkgs.values()]
        print('Found the following ROS underlay packages:')
        for pkg_name in sorted(underlay_pkg_names):
            print('  -', pkg_name)

        # get direct build dependencies
        package_root = args.package_root[-1]
        print("Crawling for packages in '%s'" % package_root)
        pkgs = find_packages(package_root)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following ROS packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        # get build dependencies and map them to binary packages
        all_pkgs = set(pkgs.values()).union(underlay_pkgs.values())

        for pkg in all_pkgs:
            pkg.evaluate_conditions(os.environ)
        for pkg in all_pkgs:
            for group_depend in pkg.group_depends:
                if group_depend.evaluated_condition is not False:
                    group_depend.extract_group_members(all_pkgs)

        dependency_keys_build = get_dependencies(
            all_pkgs, 'build', _get_build_and_recursive_run_dependencies,
            pkgs.values())

        dependency_keys_test = get_dependencies(
            all_pkgs, 'run and test', _get_test_and_recursive_run_dependencies,
            pkgs.values())

        if args.skip_rosdep_keys:
            dependency_keys_build.difference_update(args.skip_rosdep_keys)
            dependency_keys_test.difference_update(args.skip_rosdep_keys)

        # remove all non-ROS packages and packages which are present but
        # specifically ignored
        every_package_name = all_packages.keys() | all_underlay_pkg_names
        dependency_keys_build -= every_package_name
        dependency_keys_test -= every_package_name

        context = initialize_resolver(args.rosdistro_name, args.os_name,
                                      args.os_code_name)

        os_pkg_names_build = resolve_names(dependency_keys_build, **context)
        os_pkg_names_test = resolve_names(dependency_keys_test, **context)

        os_pkg_names_test -= os_pkg_names_build

    with Scope('SUBSECTION', 'Resolving package versions using apt cache'):
        apt_cache = Cache()
        os_pkg_versions = get_binary_package_versions(
            apt_cache, os_pkg_names_build | os_pkg_names_test)

    with open(os.path.join(args.output_dir, 'install_list_build.txt'),
              'w') as out_file:
        for package in sorted(os_pkg_names_build):
            out_file.write('# break docker cache %s=%s\n' %
                           (package, os_pkg_versions[package]))
            out_file.write('%s\n' % (package))

    with open(os.path.join(args.output_dir, 'install_list_test.txt'),
              'w') as out_file:
        for package in sorted(os_pkg_names_test):
            out_file.write('# break docker cache %s=%s\n' %
                           (package, os_pkg_versions[package]))
            out_file.write('%s\n' % (package))
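
For reference, get_binary_package_versions() comes from ros_buildfarm. A plausible stand-in (a sketch, not the real implementation) only needs each name's candidate version from the apt cache; like the real helper, it raises KeyError for unknown names:

def get_binary_package_versions_sketch(apt_cache, debian_pkg_names):
    # Package.candidate is the version apt would install right now
    return {name: apt_cache[name].candidate.version
            for name in debian_pkg_names}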
Example #24
0
def get_initvm_pkglist():
    cache = Cache()
    cache.open()
    pkglist = [APTPackage(p) for p in cache if p.is_installed]

    return pkglist
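
Usage sketch: APTPackage is an elbe wrapper that, as the other examples in this listing show, exposes at least name and installed_version:

for pkg in get_initvm_pkglist():
    print('%s %s' % (pkg.name, pkg.installed_version))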
Example #25
0
class RPCAPTCache(InChRootObject):

    # pylint: disable=too-many-public-methods
    def __init__(self,
                 rfs,
                 arch,
                 notifier=None,
                 norecommend=False,
                 noauth=True):

        # pylint: disable=too-many-arguments
        InChRootObject.__init__(self, rfs)

        self.notifier = notifier
        config.set("APT::Architecture", arch)
        if norecommend:
            config.set("APT::Install-Recommends", "0")
        else:
            config.set("APT::Install-Recommends", "1")

        if noauth:
            config.set("APT::Get::AllowUnauthenticated", "1")
        else:
            config.set("APT::Get::AllowUnauthenticated", "0")

        self.cache = Cache(progress=ElbeOpProgress())
        self.cache.open(progress=ElbeOpProgress())

    def dbg_dump(self, filename):
        ts = time.localtime()
        filename = filename + ('_%02d%02d%02d' %
                               (ts.tm_hour, ts.tm_min, ts.tm_sec))
        with open(filename, 'w') as dbg:
            for p in self.cache:
                dbg.write(
                    '%s %s %d %d %d %d %d %d %d %d %d %d %d %d\n' %
                    (p.name, p.candidate.version, p.marked_keep,
                     p.marked_delete, p.marked_upgrade, p.marked_downgrade,
                     p.marked_install, p.marked_reinstall, p.is_auto_installed,
                     p.is_installed, p.is_auto_removable, p.is_now_broken,
                     p.is_inst_broken, p.is_upgradable))

    def get_sections(self):
        ret = list({p.section for p in self.cache})
        ret.sort()
        return ret

    def get_pkglist(self, section):
        if section == 'all':
            ret = [APTPackage(p) for p in self.cache]
        else:
            ret = [APTPackage(p) for p in self.cache if p.section == section]

        return ret

    def mark_install(self, pkgname, version, from_user=True, nodeps=False):
        print('Mark for install "%s"' % pkgname)
        p = self.cache[pkgname]
        if version:
            p.candidate = p.versions[version]
        p.mark_install(auto_fix=not nodeps,
                       auto_inst=not nodeps,
                       from_user=from_user)

    def mark_install_devpkgs(self, ignore_pkgs, ignore_dev_pkgs):

        # we don't want to ignore libc
        ignore_pkgs.discard('libc6')
        ignore_pkgs.discard('libstdc++5')
        ignore_pkgs.discard('libstdc++6')

        # list all debian src packages of all installed packages that
        # don't come from debootstrap
        src_name_lst = []
        version_dict = {}

        for pkg in self.cache:
            if pkg.is_installed and pkg.name not in ignore_pkgs:
                src_name = pkg.candidate.source_name
                src_name_lst.append(src_name)
                version_dict[pkg.name] = pkg.candidate.version
                version_dict[src_name] = pkg.candidate.version

        def mark_install(pkg_lst, suffix):

            for pkg in pkg_lst:

                if pkg.name in ignore_dev_pkgs:
                    continue

                name_no_suffix = pkg.name[:-len(suffix)]

                if name_no_suffix in version_dict:

                    version = version_dict[name_no_suffix]
                    candidate = pkg.versions.get(version)

                    if candidate:
                        pkg.candidate = candidate

                pkg.mark_install()

        # go through all packages, remember package if its source
        # package matches one of the installed packages and the binary
        # package is a '-dev' package
        dev_lst = []

        for pkg in self.cache:

            if not pkg.name.endswith("-dev"):
                continue

            src_name = pkg.candidate.source_name

            if src_name not in version_dict:
                continue

            src_version = pkg.candidate.source_version

            if src_version != version_dict[src_name]:
                continue

            dev_lst.append(pkg)

        mark_install(dev_lst, "-dev")

        # ensure that the symlinks package will be installed (it's
        # needed for fixing links inside the sysroot)
        self.cache['symlinks'].mark_install()

        for pkg in ignore_dev_pkgs:
            self.cache[pkg].mark_delete()

        dbgsym_lst = []

        for pkg in self.cache:

            if pkg.is_installed or pkg.marked_install:

                dbg_pkg = "%s-dbgsym" % pkg.name

                if dbg_pkg in self.cache:
                    dbgsym_lst.append(self.cache[dbg_pkg])

        mark_install(dbgsym_lst, "-dbgsym")

    def cleanup(self, exclude_pkgs):
        for p in self.cache:
            if ((p.is_installed and not p.is_auto_installed)
                    or p.is_auto_removable):
                if p.name not in exclude_pkgs:
                    p.mark_delete(auto_fix=True, purge=True)

    def mark_upgrade(self, pkgname, version):
        p = self.cache[pkgname]
        if version:
            p.candidate = p.versions[version]
        p.mark_upgrade()

    def mark_delete(self, pkgname):
        p = self.cache[pkgname]
        p.mark_delete(purge=True)

    def mark_keep(self, pkgname, _version):
        p = self.cache[pkgname]
        p.mark_keep()

    def update(self):
        self.cache.update(fetch_progress=ElbeAcquireProgress())
        self.cache.open(progress=ElbeOpProgress())

    def commit(self):
        os.environ["DEBIAN_FRONTEND"] = "noninteractive"
        os.environ["DEBCONF_NONINTERACTIVE_SEEN"] = "true"
        print("Committing changes ...")
        self.cache.commit(ElbeAcquireProgress(),
                          ElbeInstallProgress(fileno=sys.stdout.fileno()))
        self.cache.open(progress=ElbeOpProgress())

    def clear(self):
        self.cache.clear()

    def get_dependencies(self, pkgname):
        deps = getalldeps(self.cache, pkgname)
        return [APTPackage(p, cache=self.cache) for p in deps]

    def get_installed_pkgs(self, section='all'):
        if section == 'all':
            pl = [APTPackage(p) for p in self.cache if p.is_installed]
        else:
            pl = [
                APTPackage(p) for p in self.cache
                if (p.section == section and p.is_installed)
            ]
        return pl

    def get_fileindex(self):
        index = {}

        for p in self.cache:
            if p.is_installed:
                for f in p.installed_files:
                    index[f] = p.name

        return index

    def get_marked_install(self, section='all'):
        if section == 'all':
            ret = [APTPackage(p) for p in self.cache if p.marked_install]
        else:
            ret = [
                APTPackage(p) for p in self.cache
                if (p.section == section and p.marked_install)
            ]
        return ret

    def get_upgradeable(self, section='all'):
        if section == 'all':
            ret = [APTPackage(p) for p in self.cache if p.is_upgradable]
        else:
            ret = [
                APTPackage(p) for p in self.cache
                if (p.section == section and p.is_upgradable)
            ]
        return ret

    def upgrade(self, dist_upgrade=False):
        self.cache.upgrade(dist_upgrade)

    def get_changes(self):
        changes = self.cache.get_changes()
        return [APTPackage(p) for p in changes]

    def has_pkg(self, pkgname):
        return pkgname in self.cache

    def is_installed(self, pkgname):
        if pkgname not in self.cache:
            return False
        return self.cache[pkgname].is_installed

    def get_pkg(self, pkgname):
        return APTPackage(self.cache[pkgname])

    def get_pkgs(self, pkgname):
        return [
            APTPackage(self.cache[p]) for p in sorted(self.cache.keys())
            if pkgname in p.lower()
        ]

    def get_corresponding_source_packages(self, pkg_lst=None):

        if pkg_lst is None:
            pkg_lst = {p.name for p in self.cache if p.is_installed}

        src_set = set()

        with TagFile('/var/lib/dpkg/status') as tagfile:
            for section in tagfile:

                pkg = section['Package']

                if pkg not in pkg_lst:
                    continue

                tmp = self.cache[pkg].installed or self.cache[pkg].candidate

                src_set.add((tmp.source_name, tmp.source_version))

                if "Built-Using" not in section:
                    continue

                built_using_lst = section["Built-Using"].split(', ')
                for built_using in built_using_lst:
                    name, version = built_using.split(' ', 1)
                    version = version.strip('(= )')
                    src_set.add((name, version))

        return list(src_set)

    @staticmethod
    def compare_versions(ver1, ver2):
        return version_compare(ver1, ver2)

    def download_binary(self, pkgname, path, version=None):
        p = self.cache[pkgname]
        if version is None:
            pkgver = p.installed
        else:
            pkgver = p.versions[version]
        rel_filename = fetch_binary(pkgver, path, ElbeAcquireProgress())
        return self.rfs.fname(rel_filename)

    def download_source(self, src_name, src_version, dest_dir):

        allow_untrusted = config.find_b("APT::Get::AllowUnauthenticated",
                                        False)

        rec = SourceRecords()
        acq = Acquire(ElbeAcquireProgress())

        # poor man's iterator
        while True:
            next_p = rec.lookup(src_name)
            # End of the list?
            if not next_p:
                raise ValueError("No source found for %s_%s" %
                                 (src_name, src_version))
            if src_version == rec.version:
                break

        # We don't allow untrusted packages and this package is not
        # marked as trusted
        if not (allow_untrusted or rec.index.is_trusted):
            raise FetchError(
                "Can't fetch source %s_%s; Source %r is not trusted" %
                (src_name, src_version, rec.index.describe))

        # Copy from src to dst all files of the source package
        dsc = None
        files = []
        for _file in rec.files:
            src = os.path.basename(_file.path)
            dst = os.path.join(dest_dir, src)

            if 'dsc' == _file.type:
                dsc = dst

            if not (allow_untrusted or _file.hashes.usable):
                raise FetchError(
                    "Can't fetch file %s. No trusted hash found." % dst)

            # acq accumulates the AcquireFile objects; the files list
            # apparently only exists to keep Python from garbage-collecting
            # them before acq.run() completes. Without it we end up with
            # an empty directory.
            files.append(
                AcquireFile(acq,
                            rec.index.archive_uri(_file.path),
                            _file.hashes,
                            _file.size,
                            src,
                            destfile=dst))
        acq.run()

        if dsc is None:
            raise ValueError("No source found for %s_%s" %
                             (src_name, src_version))

        for item in acq.items:
            if item.STAT_DONE != item.status:
                raise FetchError("Can't fetch item %s: %s" %
                                 (item.destfile, item.error_text))

        return self.rfs.fname(os.path.abspath(dsc))
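
The lookup loop above can be exercised on its own. A minimal sketch using python-apt directly (it assumes deb-src entries are configured; the package and version strings are hypothetical):

import apt_pkg

apt_pkg.init()
rec = apt_pkg.SourceRecords()
while rec.lookup('bash'):              # each call steps to the next match
    if rec.version == '5.0-4':         # hypothetical target version
        print('found %s_%s' % (rec.package, rec.version))
        break
else:
    print('no matching source record')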
Example #26
0
File: cdroms.py Project: zumbi/elbe
def mk_source_cdrom(rfs,
                    arch,
                    codename,
                    init_codename,
                    target,
                    log,
                    cdrom_size=CDROM_SIZE,
                    xml=None):

    hostfs.mkdir_p('/var/cache/elbe/sources')
    rfs.mkdir_p('/var/cache/elbe/sources')

    repo = CdromSrcRepo(codename, init_codename,
                        os.path.join(target, "srcrepo"), log, cdrom_size)

    cache = get_rpcaptcache(rfs, "aptcache.log", arch)

    pkglist = cache.get_installed_pkgs()

    forbiddenPackages = []
    if xml is not None and xml.has('target/pkg-list'):
        for i in xml.node('target/pkg-list'):
            try:
                if i.tag == 'pkg' and i.et.attrib['on_src_cd'] == 'False':
                    forbiddenPackages.append(i.text('.').strip())

            except KeyError:
                pass

    for pkg in pkglist:
        # Do not include forbidden packages in src cdrom
        if pkg.name in forbiddenPackages:
            continue
        try:
            dsc = cache.download_source(pkg.name, '/var/cache/elbe/sources')
            repo.includedsc(dsc, force=True)
        except ValueError as ve:
            log.printo("No sources for Package " + pkg.name + "-" +
                       pkg.installed_version)
        except FetchError as fe:
            log.printo("Source for Package " + pkg.name + "-" +
                       pkg.installed_version + " could not be downloaded")

    repo.finalize()

    pkglist = get_initvm_pkglist()
    cache = Cache()
    cache.open()

    for pkg in pkglist:
        # Do not include forbidden packages in src cdrom
        if pkg.name in forbiddenPackages:
            continue
        try:
            p = cache[pkg.name]
            if pkg.name == 'elbe-bootstrap':
                pkgver = p.versions[0]
            else:
                pkgver = p.installed

            dsc = pkgver.fetch_source('/var/cache/elbe/sources',
                                      ElbeAcquireProgress(cb=None),
                                      unpack=False)
            repo.includedsc(dsc)
        except ValueError as ve:
            log.printo("No sources for Package " + pkg.name + "-" +
                       str(pkg.installed_version))
        except FetchError as fe:
            log.printo("Source for Package " + pkg.name + "-" +
                       pkgver.version + " could not be downloaded")

    repo.finalize()

    return repo.buildiso(os.path.join(target, "src-cdrom.iso"))
Example #27
0
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for the doc job")
    add_argument_config_url(parser)
    parser.add_argument(
        '--rosdistro-name',
        required=True,
        help='The name of the ROS distro to identify the setup file to be '
        'sourced')
    add_argument_build_name(parser, 'doc')
    parser.add_argument('--workspace-root',
                        required=True,
                        help='The root path of the workspace to compile')
    parser.add_argument('--rosdoc-lite-dir',
                        required=True,
                        help='The root path of the rosdoc_lite repository')
    parser.add_argument('--catkin-sphinx-dir',
                        required=True,
                        help='The root path of the catkin-sphinx repository')
    parser.add_argument('--rosdoc-index-dir',
                        required=True,
                        help='The root path of the rosdoc_index folder')
    add_argument_repository_name(parser)
    parser.add_argument('--os-name',
                        required=True,
                        help="The OS name (e.g. 'ubuntu')")
    parser.add_argument('--os-code-name',
                        required=True,
                        help="The OS code name (e.g. 'xenial')")
    parser.add_argument('--arch',
                        required=True,
                        help="The architecture (e.g. 'amd64')")
    add_argument_build_tool(parser, required=True)
    add_argument_vcs_information(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_force(parser)
    add_argument_output_dir(parser, required=True)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    config = get_config_index(args.config_url)

    with Scope('SUBSECTION', 'packages'):
        # find packages in workspace
        source_space = os.path.join(args.workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        pkgs = find_packages(source_space)

        pkg_names = [pkg.name for pkg in pkgs.values()]
        print('Found the following packages:')
        for pkg_name in sorted(pkg_names):
            print('  -', pkg_name)

        maintainer_emails = set([])
        for pkg in pkgs.values():
            for m in pkg.maintainers:
                maintainer_emails.add(m.email)
        if maintainer_emails:
            print('Package maintainer emails: %s' %
                  ' '.join(sorted(maintainer_emails)))

    rosdoc_index = RosdocIndex(
        [os.path.join(args.rosdoc_index_dir, args.rosdistro_name)])

    vcs_type, vcs_version, vcs_url = args.vcs_info.split(' ', 2)

    with Scope('SUBSECTION', 'determine need to run documentation generation'):
        # compare hashes to determine if documentation needs to be regenerated
        current_hashes = {}
        current_hashes['ros_buildfarm'] = 2  # increase to retrigger doc jobs
        current_hashes['rosdoc_lite'] = get_git_hash(args.rosdoc_lite_dir)
        current_hashes['catkin-sphinx'] = get_git_hash(args.catkin_sphinx_dir)
        repo_dir = os.path.join(args.workspace_root, 'src',
                                args.repository_name)
        current_hashes[args.repository_name] = get_hash(repo_dir)
        print('Current repository hashes: %s' % current_hashes)
        tag_index_hashes = rosdoc_index.hashes.get(args.repository_name, {})
        print('Stored repository hashes: %s' % tag_index_hashes)
        skip_doc_generation = current_hashes == tag_index_hashes

    if skip_doc_generation:
        print('No changes to the source repository or any tooling repository')

        if not args.force:
            print('Skipping generation of documentation')

            # create stamp files
            print('Creating marker files to identify that documentation is ' +
                  'up-to-date')
            create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api'))

            # check if any entry needs to be updated
            print('Creating update manifest.yaml files')
            for pkg_name in pkg_names:
                # update manifest.yaml files
                current_manifest_yaml_file = os.path.join(
                    args.rosdoc_index_dir, args.rosdistro_name, 'api',
                    pkg_name, 'manifest.yaml')
                if not os.path.exists(current_manifest_yaml_file):
                    print('- %s: skipping no manifest.yaml yet' % pkg_name)
                    continue
                with open(current_manifest_yaml_file, 'r') as h:
                    remote_data = yaml.safe_load(h)
                data = copy.deepcopy(remote_data)

                data['vcs'] = vcs_type
                data['vcs_uri'] = vcs_url
                data['vcs_version'] = vcs_version

                data['depends_on'] = sorted(
                    rosdoc_index.reverse_deps.get(pkg_name, []))

                if data == remote_data:
                    print('- %s: skipping same data' % pkg_name)
                    continue

                # write manifest.yaml if it has changes
                print('- %s: api/%s/manifest.yaml' % (pkg_name, pkg_name))
                dst = os.path.join(args.output_dir, 'api', pkg_name,
                                   'manifest.yaml')
                dst_dir = os.path.dirname(dst)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst, 'w') as h:
                    yaml.dump(data, h, default_flow_style=False)

            return 0

        print("But job was started with the 'force' parameter set")

    else:
        print('The source repository and/or a tooling repository has changed')

    print('Running generation of documentation')
    rosdoc_index.hashes[args.repository_name] = current_hashes
    rosdoc_index.write_modified_data(args.output_dir, ['hashes'])

    # create stamp files
    print('Creating marker files to identify that documentation is ' +
          'up-to-date')
    create_stamp_files(pkg_names, os.path.join(args.output_dir, 'api_rosdoc'))

    index = get_index(config.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.repository_name in dist_file.repositories
    valid_package_names = \
        set(pkg_names) | set(dist_file.release_packages.keys())

    # update package deps and metapackage deps
    with Scope('SUBSECTION', 'updated rosdoc_index information'):
        for pkg in pkgs.values():
            print("Updating dependendencies for package '%s'" % pkg.name)
            depends = _get_build_run_doc_dependencies(pkg)
            ros_dependency_names = sorted(
                set([d.name for d in depends
                     if d.name in valid_package_names]))
            rosdoc_index.set_forward_deps(pkg.name, ros_dependency_names)

            if pkg.is_metapackage():
                print("Updating dependendencies for metapackage '%s'" %
                      pkg.name)
                depends = _get_run_dependencies(pkg)
                ros_dependency_names = sorted(
                    set([
                        d.name for d in depends
                        if d.name in valid_package_names
                    ]))
            else:
                ros_dependency_names = None
            rosdoc_index.set_metapackage_deps(pkg.name, ros_dependency_names)
        rosdoc_index.write_modified_data(args.output_dir,
                                         ['deps', 'metapackage_deps'])

    # generate changelog html from rst
    package_names_with_changelogs = set([])
    with Scope('SUBSECTION', 'generate changelog html from rst'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)
            assert os.path.exists(os.path.join(abs_pkg_path, 'package.xml'))
            changelog_file = os.path.join(abs_pkg_path, 'CHANGELOG.rst')
            if os.path.exists(changelog_file):
                print(("Package '%s' contains a CHANGELOG.rst, generating " +
                       "html") % pkg.name)
                package_names_with_changelogs.add(pkg.name)

                with open(changelog_file, 'r') as h:
                    rst_code = h.read()
                from docutils.core import publish_string
                html_code = publish_string(rst_code, writer_name='html')
                html_code = html_code.decode()

                # strip system message from html output
                open_tag = re.escape('<div class="first system-message">')
                close_tag = re.escape('</div>')
                pattern = '(' + open_tag + '.+?' + close_tag + ')'
                html_code = re.sub(pattern, '', html_code, flags=re.DOTALL)

                pkg_changelog_doc_path = os.path.join(args.output_dir,
                                                      'changelogs', pkg.name)
                os.makedirs(pkg_changelog_doc_path)
                with open(
                        os.path.join(pkg_changelog_doc_path, 'changelog.html'),
                        'w') as h:
                    h.write(html_code)

    ordered_pkg_tuples = topological_order_packages(pkgs)

    # create rosdoc tag list and location files
    with Scope('SUBSECTION', 'create rosdoc tag list and location files'):
        rosdoc_config_files = {}
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            rosdoc_exports = [
                e.attributes['content'] for e in pkg.exports
                if e.tagname == 'rosdoc' and 'content' in e.attributes
            ]
            prefix = '${prefix}'
            rosdoc_config_file = rosdoc_exports[-1] \
                if rosdoc_exports else '%s/rosdoc.yaml' % prefix
            rosdoc_config_file = rosdoc_config_file.replace(
                prefix, abs_pkg_path)
            if os.path.isfile(rosdoc_config_file):
                rosdoc_config_files[pkg.name] = rosdoc_config_file

        for _, pkg in ordered_pkg_tuples:
            dst = os.path.join(args.output_dir, 'rosdoc_tags',
                               '%s.yaml' % pkg.name)
            print("Generating rosdoc tag list file for package '%s'" %
                  pkg.name)

            dep_names = rosdoc_index.get_recursive_dependencies(pkg.name)
            # make sure that we don't pass our own tagfile to ourselves;
            # bad things happen when we do
            assert pkg.name not in dep_names
            locations = []
            for dep_name in sorted(dep_names):
                if dep_name not in rosdoc_index.locations:
                    print("- skipping not existing location file of " +
                          "dependency '%s'" % dep_name)
                    continue
                print("- including location files of dependency '%s'" %
                      dep_name)
                dep_locations = rosdoc_index.locations[dep_name]
                if dep_locations:
                    for dep_location in dep_locations:
                        assert dep_location['package'] == dep_name
                        # update tag information to point to local location
                        location = copy.deepcopy(dep_location)
                        if not location['location'].startswith('file://'):
                            location['location'] = 'file://%s' % os.path.join(
                                args.rosdoc_index_dir, location['location'])
                        locations.append(location)

            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(locations, h)

            print("Creating location file for package '%s'" % pkg.name)
            data = {
                'docs_url':
                '../../../api/%s/html' % pkg.name,
                'location':
                'file://%s' %
                os.path.join(args.output_dir, 'symbols', '%s.tag' % pkg.name),
                'package':
                pkg.name,
            }

            # fetch generator specific output folders from rosdoc_lite
            if pkg.name in rosdoc_config_files:
                output_folders = get_generator_output_folders(
                    rosdoc_config_files[pkg.name], pkg.name)
                if 'doxygen' in output_folders:
                    data['docs_url'] += '/' + output_folders['doxygen']

            rosdoc_index.locations[pkg.name] = [data]
            # do not write these local locations

    # used to determine all source and release jobs
    source_build_files = get_source_build_files(config, args.rosdistro_name)
    release_build_files = get_release_build_files(config, args.rosdistro_name)

    # TODO this should reuse the logic from the job generation
    used_source_build_names = []
    for source_build_name, build_file in source_build_files.items():
        repo_names = build_file.filter_repositories([args.repository_name])
        if not repo_names:
            continue
        matching_dist_file = get_distribution_file_matching_build_file(
            index, args.rosdistro_name, build_file)
        repo = matching_dist_file.repositories[args.repository_name]
        if not repo.source_repository:
            continue
        if not repo.source_repository.version:
            continue
        if build_file.test_commits_force is False:
            continue
        elif repo.source_repository.test_commits is False:
            continue
        elif repo.source_repository.test_commits is None and \
                not build_file.test_commits_default:
            continue
        used_source_build_names.append(source_build_name)

    doc_build_files = get_doc_build_files(config, args.rosdistro_name)
    doc_build_file = doc_build_files[args.doc_build_name]

    # create manifest.yaml files from repository / package meta information
    # will be merged with the manifest.yaml file generated by rosdoc_lite later
    repository = dist_file.repositories[args.repository_name]
    with Scope('SUBSECTION', 'create manifest.yaml files'):
        for pkg in pkgs.values():

            data = {}

            data['vcs'] = vcs_type
            data['vcs_uri'] = vcs_url
            data['vcs_version'] = vcs_version

            data['repo_name'] = args.repository_name
            data['timestamp'] = time.time()

            data['depends'] = sorted(
                rosdoc_index.forward_deps.get(pkg.name, []))
            data['depends_on'] = sorted(
                rosdoc_index.reverse_deps.get(pkg.name, []))

            if pkg.name in rosdoc_index.metapackage_index:
                data['metapackages'] = rosdoc_index.metapackage_index[pkg.name]

            if pkg.name in rosdoc_index.metapackage_deps:
                data['packages'] = rosdoc_index.metapackage_deps[pkg.name]

            if pkg.name in package_names_with_changelogs:
                data['has_changelog_rst'] = True

            data['api_documentation'] = '%s/%s/api/%s/html' % \
                (doc_build_file.canonical_base_url, args.rosdistro_name, pkg.name)

            pkg_status = None
            pkg_status_description = None
            # package level status information
            if pkg.name in repository.status_per_package:
                pkg_status_data = repository.status_per_package[pkg.name]
                pkg_status = pkg_status_data.get('status', None)
                pkg_status_description = pkg_status_data.get(
                    'status_description', None)
            # repository level status information
            if pkg_status is None:
                pkg_status = repository.status
            if pkg_status_description is None:
                pkg_status_description = repository.status_description
            if pkg_status is not None:
                data['maintainer_status'] = pkg_status
            if pkg_status_description is not None:
                data['maintainer_status_description'] = pkg_status_description

            # add doc job url
            data['doc_job'] = get_doc_job_url(config.jenkins_url,
                                              args.rosdistro_name,
                                              args.doc_build_name,
                                              args.repository_name,
                                              args.os_name, args.os_code_name,
                                              args.arch)

            # add devel job urls
            build_files = {}
            for build_name in used_source_build_names:
                build_files[build_name] = source_build_files[build_name]
            devel_job_urls = get_devel_job_urls(config.jenkins_url,
                                                build_files,
                                                args.rosdistro_name,
                                                args.repository_name)
            if devel_job_urls:
                data['devel_jobs'] = devel_job_urls

            # TODO this should reuse the logic from the job generation
            used_release_build_names = []
            for release_build_name, build_file in release_build_files.items():
                filtered_pkg_names = build_file.filter_packages([pkg.name])
                if not filtered_pkg_names:
                    continue
                matching_dist_file = get_distribution_file_matching_build_file(
                    index, args.rosdistro_name, build_file)
                repo = matching_dist_file.repositories[args.repository_name]
                if not repo.release_repository:
                    continue
                if not repo.release_repository.version:
                    continue
                used_release_build_names.append(release_build_name)

            # add release job urls
            build_files = {}
            for build_name in used_release_build_names:
                build_files[build_name] = release_build_files[build_name]
            release_job_urls = get_release_job_urls(config.jenkins_url,
                                                    build_files,
                                                    args.rosdistro_name,
                                                    pkg.name)
            if release_job_urls:
                data['release_jobs'] = release_job_urls

            # write manifest.yaml
            dst = os.path.join(args.output_dir, 'manifests', pkg.name,
                               'manifest.yaml')
            dst_dir = os.path.dirname(dst)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
            with open(dst, 'w') as h:
                yaml.dump(data, h)

    # overwrite CMakeLists.txt files of each package
    with Scope('SUBSECTION',
               'overwrite CMakeLists.txt files to only generate messages'):
        for pkg_path, pkg in pkgs.items():
            abs_pkg_path = os.path.join(source_space, pkg_path)

            build_types = [
                e.content for e in pkg.exports if e.tagname == 'build_type'
            ]
            build_type_cmake = build_types and build_types[0] == 'cmake'

            data = {
                'package_name': pkg.name,
                'build_type_cmake': build_type_cmake,
            }
            content = expand_template('doc/CMakeLists.txt.em', data)
            print("Generating 'CMakeLists.txt' for package '%s'" % pkg.name)
            cmakelist_file = os.path.join(abs_pkg_path, 'CMakeLists.txt')
            with open(cmakelist_file, 'w') as h:
                h.write(content)

    with Scope('SUBSECTION', 'determine dependencies and generate Dockerfile'):
        # initialize rosdep view
        context = initialize_resolver(args.rosdistro_name, args.os_name,
                                      args.os_code_name)

        apt_cache = Cache()

        debian_pkg_names = [
            'build-essential',
            'openssh-client',
            'python3',
            'python3-yaml',
            'rsync',
            # the following are required by rosdoc_lite
            'doxygen',
            'python-catkin-pkg-modules',
            'python-epydoc',
            'python-kitchen',
            'python-rospkg',
            'python-sphinx',
            'python-yaml',
            # since catkin is not a run dependency but provides the setup files
            get_debian_package_name(args.rosdistro_name, 'catkin'),
            # rosdoc_lite does not work without genmsg being importable
            get_debian_package_name(args.rosdistro_name, 'genmsg'),
        ]
        if args.build_tool == 'colcon':
            debian_pkg_names.append('python3-colcon-ros')
        if 'actionlib_msgs' in pkg_names:
            # to document actions in other packages in the same repository
            debian_pkg_names.append(
                get_debian_package_name(args.rosdistro_name, 'actionlib_msgs'))
        print('Always install the following generic dependencies:')
        for debian_pkg_name in sorted(debian_pkg_names):
            print('  -', debian_pkg_name)

        debian_pkg_versions = {}

        # get build, run and doc dependencies and map them to binary packages
        depends = get_dependencies(pkgs.values(), 'build, run and doc',
                                   _get_build_run_doc_dependencies)
        debian_pkg_names_depends = resolve_names(depends, **context)
        debian_pkg_names_depends -= set(debian_pkg_names)
        debian_pkg_names += order_dependencies(debian_pkg_names_depends)
        missing_debian_pkg_names = []
        for debian_pkg_name in debian_pkg_names:
            try:
                debian_pkg_versions.update(
                    get_binary_package_versions(apt_cache, [debian_pkg_name]))
            except KeyError:
                missing_debian_pkg_names.append(debian_pkg_name)
        if missing_debian_pkg_names:
            # we allow missing dependencies to support basic documentation
            # of packages which use unreleased dependencies
            print(
                '# BEGIN SUBSECTION: MISSING DEPENDENCIES might result in failing build'
            )
            for debian_pkg_name in missing_debian_pkg_names:
                print("Could not find apt package '%s', skipping dependency" %
                      debian_pkg_name)
                debian_pkg_names.remove(debian_pkg_name)
            print('# END SUBSECTION')

        # generate Dockerfile
        data = {
            'os_name':
            args.os_name,
            'os_code_name':
            args.os_code_name,
            'arch':
            args.arch,
            'build_tool':
            doc_build_file.build_tool,
            'distribution_repository_urls':
            args.distribution_repository_urls,
            'distribution_repository_keys':
            get_distribution_repository_keys(
                args.distribution_repository_urls,
                args.distribution_repository_key_files),
            'rosdistro_name':
            args.rosdistro_name,
            'uid':
            get_user_id(),
            'dependencies':
            debian_pkg_names,
            'dependency_versions':
            debian_pkg_versions,
            'install_lists': [],
            'canonical_base_url':
            doc_build_file.canonical_base_url,
            'ordered_pkg_tuples':
            ordered_pkg_tuples,
            'rosdoc_config_files':
            rosdoc_config_files,
        }
        create_dockerfile('doc/doc_task.Dockerfile.em', data,
                          args.dockerfile_dir)
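
The missing-dependency handling above reduces to a simple python-apt membership test. A minimal sketch (the package names are hypothetical):

from apt import Cache

apt_cache = Cache()
wanted = ['doxygen', 'no-such-package-xyz']
missing = [name for name in wanted if name not in apt_cache]
for name in missing:
    print("Could not find apt package '%s', skipping dependency" % name)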
Example #28
0
class RPCAPTCache(InChRootObject):
    # pylint: disable=too-many-public-methods
    def __init__(
            self,
            rfs,
            log,
            arch,
            notifier=None,
            norecommend=False,
            noauth=True):

        # pylint: disable=too-many-arguments

        sys.stdout = open(log, 'a', buffering=0)
        sys.stderr = open(log, 'a', buffering=0)
        self.logfile = open(log, 'a', buffering=0)

        InChRootObject.__init__(self, rfs)

        self.notifier = notifier
        config.set("APT::Architecture", arch)
        if norecommend:
            config.set("APT::Install-Recommends", "0")
        else:
            config.set("APT::Install-Recommends", "1")

        if noauth:
            config.set("APT::Get::AllowUnauthenticated", "1")
        else:
            config.set("APT::Get::AllowUnauthenticated", "0")

        self.cache = Cache(progress=ElbeOpProgress())
        self.cache.open(progress=ElbeOpProgress())

    def dbg_dump(self, filename):
        ts = time.localtime()
        filename = filename + (
            '_%02d%02d%02d' % (ts.tm_hour, ts.tm_min, ts.tm_sec))
        with open(filename, 'w') as dbg:
            for p in self.cache:
                dbg.write(
                    '%s %s %d %d %d %d %d %d %d %d %d %d %d %d\n' %
                    (p.name,
                     p.candidate.version,
                     p.marked_keep,
                     p.marked_delete,
                     p.marked_upgrade,
                     p.marked_downgrade,
                     p.marked_install,
                     p.marked_reinstall,
                     p.is_auto_installed,
                     p.is_installed,
                     p.is_auto_removable,
                     p.is_now_broken,
                     p.is_inst_broken,
                     p.is_upgradable))

    def get_sections(self):
        ret = list({p.section for p in self.cache})
        ret.sort()
        return ret

    def get_pkglist(self, section):
        if section == 'all':
            ret = [APTPackage(p) for p in self.cache]
        else:
            ret = [APTPackage(p) for p in self.cache if p.section == section]

        return ret

    def mark_install(self, pkgname, version, from_user=True, nodeps=False):
        p = self.cache[pkgname]
        if version:
            p.candidate = p.versions[version]
        p.mark_install(auto_fix=not nodeps,
                       auto_inst=not nodeps,
                       from_user=from_user)

    def mark_install_devpkgs(self, ignore_pkgs, ignore_dev_pkgs):
        ignore_pkgs.discard('libc6')  # we don't want to ignore libc
        ignore_pkgs.discard('libstdc++5')
        ignore_pkgs.discard('libstdc++6')
        # list all debian src packages of all installed packages that don't
        # come from debootstrap
        src_list = [
            p.candidate.source_name for p in self.cache if (
                p.is_installed and p.name not in ignore_pkgs)]
        # go through all packages, remember package if its source package
        # matches one of the installed packages and the binary package is a
        # '-dev' package
        dev_list = [
            s for s in self.cache if (
                s.candidate.source_name in src_list and (
                    s.name.endswith('-dev')))]
        for p in dev_list:
            if p.name not in ignore_dev_pkgs:
                p.mark_install()
        # ensure that the symlinks package will be installed (it's needed for
        # fixing links inside the sysroot)
        self.cache['symlinks'].mark_install()

        for p in ignore_dev_pkgs:
            self.cache[p].mark_delete()

    def cleanup(self, exclude_pkgs):
        for p in self.cache:
            if ((p.is_installed and not p.is_auto_installed)
                    or p.is_auto_removable):
                if p.name not in exclude_pkgs:
                    p.mark_delete(auto_fix=True, purge=True)

    def mark_upgrade(self, pkgname, version):
        p = self.cache[pkgname]
        if version:
            p.candidate = p.versions[version]
        p.mark_upgrade()

    def mark_delete(self, pkgname):
        p = self.cache[pkgname]
        p.mark_delete(purge=True)

    def mark_keep(self, pkgname, _version):
        p = self.cache[pkgname]
        p.mark_keep()

    def update(self):
        self.cache.update(fetch_progress=ElbeAcquireProgress())
        self.cache.open(progress=ElbeOpProgress())

    def commit(self):
        os.environ["DEBIAN_FRONTEND"] = "noninteractive"
        os.environ["DEBCONF_NONINTERACTIVE_SEEN"] = "true"
        self.cache.commit(ElbeAcquireProgress(),
                          ElbeInstallProgress(fileno=self.logfile.fileno()))
        self.cache.open(progress=ElbeOpProgress())

    def clear(self):
        self.cache.clear()

    def get_dependencies(self, pkgname):
        deps = getalldeps(self.cache, pkgname)
        return [APTPackage(p, cache=self.cache) for p in deps]

    def get_installed_pkgs(self, section='all'):
        # avoid DeprecationWarning: MD5Hash is deprecated, use Hashes instead
        # triggered by python-apt
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", category=DeprecationWarning)
            if section == 'all':
                pl = [APTPackage(p) for p in self.cache if p.is_installed]
            else:
                pl = [APTPackage(p) for p in self.cache if (
                    p.section == section and p.is_installed)]
            return pl

    def get_fileindex(self):
        index = {}

        for p in self.cache:
            if p.is_installed:
                for f in p.installed_files:
                    index[f] = p.name

        return index

    def get_marked_install(self, section='all'):
        if section == 'all':
            ret = [APTPackage(p) for p in self.cache if p.marked_install]
        else:
            ret = [APTPackage(p) for p in self.cache if (p.section == section
                                                         and p.marked_install)]
        return ret

    def get_upgradeable(self, section='all'):
        if section == 'all':
            ret = [APTPackage(p) for p in self.cache if p.is_upgradable]
        else:
            ret = [APTPackage(p) for p in self.cache if (p.section == section
                                                         and p.is_upgradable)]
        return ret

    def upgrade(self, dist_upgrade=False):
        self.cache.upgrade(dist_upgrade)

    def get_changes(self):
        changes = self.cache.get_changes()
        return [APTPackage(p) for p in changes]

    def has_pkg(self, pkgname):
        return pkgname in self.cache

    def is_installed(self, pkgname):
        if pkgname not in self.cache:
            return False
        return self.cache[pkgname].is_installed

    def get_pkg(self, pkgname):
        return APTPackage(self.cache[pkgname])

    def get_pkgs(self, pkgname):
        return [
            APTPackage(
                self.cache[p]) for p in sorted(
                self.cache.keys()) if pkgname in p.lower()]

    def compare_versions(self, ver1, ver2):
        return version_compare(ver1, ver2)

    def download_binary(self, pkgname, path, version=None):
        p = self.cache[pkgname]
        if version is None:
            pkgver = p.installed
        else:
            pkgver = p.versions[version]
        # avoid DeprecationWarning:
        # "MD5Hash is deprecated, use Hashes instead"
        # triggered by python-apt
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore",
                                    category=DeprecationWarning)
            rel_filename = pkgver.fetch_binary(path,
                                               ElbeAcquireProgress())
            return self.rfs.fname(rel_filename)

    def download_source(self, pkgname, path, version=None):
        p = self.cache[pkgname]
        if version is None:
            pkgver = p.installed
        else:
            pkgver = p.versions[version]

        # avoid DeprecationWarning:
        # "MD5Hash is deprecated, use Hashes instead"
        # triggered by python-apt
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore",
                                    category=DeprecationWarning)
            rel_filename = pkgver.fetch_source(path,
                                               ElbeAcquireProgress(),
                                               unpack=False)
            return self.rfs.fname(rel_filename)
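
compare_versions() above delegates to apt_pkg.version_compare. Stand-alone it behaves like this (apt_pkg.init() must run once before use):

import apt_pkg

apt_pkg.init()
print(apt_pkg.version_compare('1.0-1', '1.0-2'))  # negative: first is older
print(apt_pkg.version_compare('2:1.0', '1.9'))    # positive: the epoch wins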
Example #29
0
def main(argv=sys.argv[1:]):
    parser = argparse.ArgumentParser(
        description="Generate a 'Dockerfile' for building the binarydeb")
    add_argument_rosdistro_index_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_package_name(parser)
    add_argument_os_name(parser)
    add_argument_os_code_name(parser)
    add_argument_arch(parser)
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_binarydeb_dir(parser)
    add_argument_dockerfile_dir(parser)
    add_argument_env_vars(parser)
    args = parser.parse_args(argv)

    debian_package_name = get_debian_package_name(args.rosdistro_name,
                                                  args.package_name)

    # get expected package version from rosdistro
    index = get_index(args.rosdistro_index_url)
    dist_file = get_distribution_file(index, args.rosdistro_name)
    assert args.package_name in dist_file.release_packages
    pkg = dist_file.release_packages[args.package_name]
    repo = dist_file.repositories[pkg.repository_name]
    package_version = repo.release_repository.version

    debian_package_version = package_version

    # build_binarydeb dependencies
    debian_pkg_names = ['apt-src']

    # add build dependencies from .dsc file
    dsc_file = get_dsc_file(args.binarydeb_dir, debian_package_name,
                            debian_package_version)
    debian_pkg_names += sorted(get_build_depends(dsc_file))

    # get versions for build dependencies
    apt_cache = Cache()
    debian_pkg_versions = get_binary_package_versions(apt_cache,
                                                      debian_pkg_names)

    # generate Dockerfile
    data = {
        'os_name':
        args.os_name,
        'os_code_name':
        args.os_code_name,
        'arch':
        args.arch,
        'uid':
        get_user_id(),
        'distribution_repository_urls':
        args.distribution_repository_urls,
        'distribution_repository_keys':
        get_distribution_repository_keys(
            args.distribution_repository_urls,
            args.distribution_repository_key_files),
        'build_environment_variables':
        args.env_vars,
        'dependencies':
        debian_pkg_names,
        'dependency_versions':
        debian_pkg_versions,
        'rosdistro_name':
        args.rosdistro_name,
        'package_name':
        args.package_name,
        'binarydeb_dir':
        args.binarydeb_dir,
    }
    create_dockerfile('release/binarydeb_task.Dockerfile.em', data,
                      args.dockerfile_dir)

    # output hints about necessary volumes to mount
    ros_buildfarm_basepath = os.path.normpath(
        os.path.join(os.path.dirname(__file__), '..', '..'))
    print('Mount the following volumes when running the container:')
    print('  -v %s:/tmp/ros_buildfarm:ro' % ros_buildfarm_basepath)
    print('  -v %s:/tmp/binarydeb' % args.binarydeb_dir)
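
get_dsc_file() and get_build_depends() are ros_buildfarm helpers. Reading Build-Depends out of a .dsc can be sketched with python-debian (an assumption about tooling; the real helper may parse differently):

from debian import deb822

def build_depends_sketch(dsc_path):
    with open(dsc_path) as f:
        dsc = deb822.Dsc(f)
    # strip version constraints such as 'debhelper (>= 9)'
    return [dep.strip().split(' ')[0]
            for dep in dsc.get('Build-Depends', '').split(',')
            if dep.strip()]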
Example #30
0
File: dump.py Project: atoz-chevara/elbe
def get_initvm_pkglist ():
    cache = Cache ()
    cache.open ()
    pkglist = [APTPackage (p) for p in cache if p.is_installed]
    pkglist.append ( APTPackage( cache ['elbe-bootstrap'] ) )
    return pkglist
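
Note that this variant force-appends 'elbe-bootstrap', so the package can appear twice when it is also installed. A quick dedup check, as a sketch:

pkgs = get_initvm_pkglist()
names = {p.name for p in pkgs}
print('%d entries, %d unique package names' % (len(pkgs), len(names)))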