Example #1
    def install_packages(self, rootdir, configdir, release, packages,
            verbose=False, cache_dir=None):
        '''Install packages into a sandbox (for apport-retrace).

        In order to work without any special permissions and without touching
        the running system, this should only download and unpack packages into
        the given root directory, not install them into the system.

        configdir points to a directory with by-release configuration files for
        the packaging system; this is completely dependent on the backend
        implementation, the only assumption is that this looks into
        configdir/release/, so that you can use retracing for multiple
        DistroReleases. As a special case, if configdir is None, it uses the
        current system configuration, and "release" is ignored.

        release is the value of the report's 'DistroRelease' field.

        packages is a list of ('packagename', 'version') tuples. If the version
        is None, it should install the most current available version.
        
        If cache_dir is given, then the downloaded packages will be stored
        there, to speed up subsequent retraces.

        Return a string with outdated packages, or None if all packages were
        installed.

        If something is wrong with the environment (invalid configuration,
        package servers down, etc.), this should raise a SystemError with a
        meaningful error message.
        '''
        if not configdir:
            apt_sources = '/etc/apt/sources.list'
        else:
            apt_sources = os.path.join(configdir, release, 'sources.list')
        if not os.path.exists(apt_sources):
            raise SystemError('%s does not exist' % apt_sources)

        # create apt sandbox
        if cache_dir:
            tmp_aptroot = False
            if configdir:
                aptroot = os.path.join(cache_dir, release, 'apt')
            else:
                aptroot = os.path.join(cache_dir, 'system', 'apt')
            try:
                os.makedirs(aptroot)
            except OSError:
                pass
        else:
            tmp_aptroot = True
            aptroot = tempfile.mkdtemp()

        self._build_apt_sandbox(aptroot, apt_sources)

        if verbose:
            fetchProgress = apt.progress.text.AcquireProgress()
        else:
            fetchProgress = apt.progress.base.AcquireProgress()
        c = apt.Cache(rootdir=os.path.abspath(aptroot))
        try:
            c.update(fetchProgress)
        except apt.cache.FetchFailedException as e:
            raise SystemError(str(e))
        c.open()

        obsolete = ''

        # mark packages for installation
        real_pkgs = set()
        for (pkg, ver) in packages:
            try:
                candidate = c[pkg].candidate
            except KeyError:
                candidate = None
            if not candidate:
                m = 'package %s does not exist, ignoring' % pkg
                obsolete += m + '\n'
                apport.warning(m)
                continue

            if ver and candidate.version != ver:
                w = '%s version %s required, but %s is available' % (pkg, ver, candidate.version)
                obsolete += w + '\n'
            real_pkgs.add(pkg)

            if candidate.architecture != 'all':
                if pkg + '-dbg' in c:
                    real_pkgs.add(pkg + '-dbg')
                elif pkg + '-dbgsym' in c:
                    real_pkgs.add(pkg + '-dbgsym')
                    if c[pkg + '-dbgsym'].candidate.version != candidate.version:
                        obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
                                pkg, candidate.version, c[pkg + '-dbgsym'].candidate.version)

        for p in real_pkgs:
            c[p].mark_install(False, False)

        # fetch packages
        fetcher = apt.apt_pkg.Acquire(fetchProgress)
        try:
            res = c.fetch_archives(fetcher=fetcher)
        except apt.cache.FetchFailedException as e:
            apport.error('Package download error, try again later: %s', str(e))
            sys.exit(99) # transient error

        # unpack packages
        if verbose:
            print('Extracting downloaded debs...')
        for i in fetcher.items:
            subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
            real_pkgs.remove(os.path.basename(i.destfile).split('_', 1)[0])

        if tmp_aptroot:
            shutil.rmtree(aptroot)

        # check bookkeeping that apt fetcher really got everything
        assert not real_pkgs, 'apt fetcher did not fetch these packages: ' \
            + ' '.join(real_pkgs)

        # work around python-apt bug that causes parts of the Cache(rootdir=)
        # argument configuration to be persistent; this resets the apt
        # configuration to system defaults again
        apt.Cache(rootdir='/')
        self._apt_cache = None

        return obsolete
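
A minimal usage sketch for this method (hedged: 'impl' stands for whatever
apt/dpkg backend instance exposes install_packages(), and all paths and
versions below are made up for illustration):

    # Download coreutils plus its debug symbols into a sandbox, using a
    # config directory laid out as <configdir>/<release>/sources.list.
    outdated = impl.install_packages(
        '/tmp/sandbox',                    # rootdir: debs are unpacked here
        '/srv/retrace-config',             # configdir (None = running system)
        'Ubuntu 12.04',                    # report's DistroRelease value
        [('coreutils', '8.13-3ubuntu3')],  # (package, version); None = newest
        verbose=True,
        cache_dir='/var/cache/retrace')    # keep downloads for later retraces
    if outdated:
        print('Outdated packages:\n' + outdated)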
Example #2
    def install_packages(self,
                         rootdir,
                         configdir,
                         release,
                         packages,
                         verbose=False,
                         cache_dir=None,
                         permanent_rootdir=False,
                         architecture=None):
        '''Install packages into a sandbox (for apport-retrace).

        In order to work without any special permissions and without touching
        the running system, this should only download and unpack packages into
        the given root directory, not install them into the system.

        configdir points to a directory with by-release configuration files for
        the packaging system; this is completely dependent on the backend
        implementation, the only assumption is that this looks into
        configdir/release/, so that you can use retracing for multiple
        DistroReleases. As a special case, if configdir is None, it uses the
        current system configuration, and "release" is ignored.

        release is the value of the report's 'DistroRelease' field.

        packages is a list of ('packagename', 'version') tuples. If the version
        is None, it should install the most current available version.

        If cache_dir is given, then the downloaded packages will be stored
        there, to speed up subsequent retraces.

        If permanent_rootdir is True, then the sandbox created from the
        downloaded packages will be reused, to speed up subsequent retraces.

        If architecture is given, the sandbox will be created with packages of
        the given architecture (as specified in a report's "Architecture"
        field). If not given it defaults to the host system's architecture.

        Return a string with outdated packages, or None if all packages were
        installed.

        If something is wrong with the environment (invalid configuration,
        package servers down, etc.), this should raise a SystemError with a
        meaningful error message.
        '''
        if not configdir:
            apt_sources = '/etc/apt/sources.list'
            self.current_release_codename = self.get_distro_codename()
        else:
            # support architecture specific config, fall back to global config
            apt_sources = os.path.join(configdir, release, 'sources.list')
            if architecture:
                arch_apt_sources = os.path.join(configdir, release,
                                                architecture, 'sources.list')
                if os.path.exists(arch_apt_sources):
                    apt_sources = arch_apt_sources

            # set mirror for get_file_package()
            try:
                self.set_mirror(
                    self._get_primary_mirror_from_apt_sources(apt_sources))
            except SystemError as e:
                apport.warning('cannot determine mirror: %s' % str(e))

            # set current release code name for _distro_release_to_codename
            with open(os.path.join(configdir, release, 'codename')) as f:
                self.current_release_codename = f.read().strip()

        if not os.path.exists(apt_sources):
            raise SystemError('%s does not exist' % apt_sources)

        # create apt sandbox
        if cache_dir:
            tmp_aptroot = False
            if configdir:
                aptroot = os.path.join(cache_dir, release, 'apt')
            else:
                aptroot = os.path.join(cache_dir, 'system', 'apt')
            if not os.path.isdir(aptroot):
                os.makedirs(aptroot)
        else:
            tmp_aptroot = True
            aptroot = tempfile.mkdtemp()

        if architecture:
            apt.apt_pkg.config.set('APT::Architecture', architecture)
        else:
            apt.apt_pkg.config.set('APT::Architecture',
                                   self.get_system_architecture())

        if verbose:
            fetchProgress = apt.progress.text.AcquireProgress()
        else:
            fetchProgress = apt.progress.base.AcquireProgress()
        if not tmp_aptroot:
            c = self._sandbox_cache(aptroot, apt_sources, fetchProgress)
        else:
            self._build_apt_sandbox(aptroot, apt_sources)
            c = apt.Cache(rootdir=os.path.abspath(aptroot))
            try:
                c.update(fetchProgress)
            except apt.cache.FetchFailedException as e:
                raise SystemError(str(e))
            c.open()

        obsolete = ''

        src_records = apt.apt_pkg.SourceRecords()

        REPO = 'http://archive.zentyal.org/old'
        UBUNTU_REPO = 'http://archive.ubuntu.com/ubuntu'
        DBG_PKGS = {
            'libldb1': 'ldb',
            'libsope1': 'sope',
            'libc6': 'eglibc',
            'libwbxml2-0': 'wbxml2',
            'openchangeserver': 'openchange',
            'openchangeproxy': 'openchange',
            'libmapi0': 'openchange',
            'libmapistore0': 'openchange',
            'libmapiproxy0': 'openchange',
            'python-ldb': 'ldb',
            'samba': 'samba',
            'sogo': 'sogo',
            'sogo-openchange': 'sogo',
            'libtalloc2': 'talloc',
            'libtevent0': 'tevent',
            'libntdb1': 'ntdb'
        }

        # urls to overwrite dbg packages with proper version
        dbg_pkg_urls = {}
        pkgs_not_found = []

        # mark packages for installation
        real_pkgs = set()
        for (pkg, ver) in packages:
            if not ver:
                continue
            if pkg in DBG_PKGS.keys():
                source = DBG_PKGS[pkg]
                for pname in [pkg, pkg + '-dbg']:
                    for repo in (REPO, UBUNTU_REPO):
                        filename = (pname + '_' + re.sub('^.*:', '', ver) +
                                    '_amd64.deb')
                        url = (repo + '/pool/main/' + source[0:1] + '/' +
                               source + '/' + filename)
                        if requests.head(url).status_code == 200:
                            dbg_pkg_urls[pname] = url
                            break
                        elif repo == UBUNTU_REPO and pname[-4:] != '-dbg':
                            pkgs_not_found.append(filename)

            try:
                candidate = c[pkg].candidate
            except KeyError:
                candidate = None
            if not candidate:
                m = 'package %s does not exist, ignoring' % pkg.replace(
                    '%', '%%')
                obsolete += m + '\n'
                apport.warning(m)
                continue

            if ver and candidate.version != ver:
                w = '%s version %s required, but %s is available' % (
                    pkg, ver, candidate.version)
                obsolete += w + '\n'
            real_pkgs.add(pkg)

            if permanent_rootdir:
                virtual_mapping = self._virtual_mapping(aptroot)
                # Remember all the virtual packages that this package provides,
                # so that if we encounter that virtual package as a
                # Conflicts/Replaces later, we know to remove this package from
                # the cache.
                for p in candidate.provides:
                    virtual_mapping.setdefault(p, set()).add(pkg)
                conflicts = []
                if 'Conflicts' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(
                        candidate.record['Conflicts'])
                if 'Replaces' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(
                        candidate.record['Replaces'])
                archives = apt.apt_pkg.config.find_dir('Dir::Cache::archives')
                for conflict in conflicts:
                    # apt_pkg.parse_depends needs to handle the or operator,
                    # but as policy states it is invalid to use that in
                    # Replaces/Depends, we can safely choose the first value
                    # here.
                    conflict = conflict[0]
                    if c.is_virtual_package(conflict[0]):
                        try:
                            providers = virtual_mapping[conflict[0]]
                        except KeyError:
                            # We may not have seen the virtual package that
                            # this conflicts with, so we can assume it's not
                            # unpacked into the sandbox.
                            continue
                        for p in providers:
                            debs = os.path.join(archives, '%s_*.deb' % p)
                            for path in glob.glob(debs):
                                ver = self._deb_version(path)
                                if apt.apt_pkg.check_dep(
                                        ver, conflict[2], conflict[1]):
                                    os.unlink(path)
                        del providers
                    else:
                        debs = os.path.join(archives, '%s_*.deb' % conflict[0])
                        for path in glob.glob(debs):
                            ver = self._deb_version(path)
                            if apt.apt_pkg.check_dep(ver, conflict[2],
                                                     conflict[1]):
                                os.unlink(path)

            if candidate.architecture != 'all':
                if pkg + '-dbg' in c:
                    real_pkgs.add(pkg + '-dbg')
                else:
                    # install all -dbg from the source package
                    if src_records.lookup(candidate.source_name):
                        dbgs = [
                            p for p in src_records.binaries
                            if p.endswith('-dbg') and p in c
                        ]
                    else:
                        dbgs = []
                    if dbgs:
                        for p in dbgs:
                            real_pkgs.add(p)
                    else:
                        if pkg + '-dbgsym' in c:
                            real_pkgs.add(pkg + '-dbgsym')
                            if c[pkg + '-dbgsym'].candidate.version != candidate.version:
                                obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
                                    pkg, candidate.version,
                                    c[pkg + '-dbgsym'].candidate.version)

        if pkgs_not_found:
            print "Aborting retrace as some packages cannot be found in the repos:"
            print "\n".join(pkgs_not_found)
            sys.exit(1)

        for p in real_pkgs:
            c[p].mark_install(False, False)

        last_written = time.time()
        # fetch packages
        fetcher = apt.apt_pkg.Acquire(fetchProgress)
        try:
            c.fetch_archives(fetcher=fetcher)
        except apt.cache.FetchFailedException as e:
            apport.error('Package download error, try again later: %s', str(e))
            sys.exit(99)  # transient error

        # FIXME: unhardcode path
        pkgs_path = cache_dir + '/Ubuntu 14.04/apt/var/cache/apt/archives'
        debs_to_unpack = []
        for pname, url in dbg_pkg_urls.items():
            # Remove other versions before downloading the proper one
            for i in glob.glob(os.path.join(pkgs_path, '%s_*.deb' % pname)):
                #print "DELETING AUTO-FETCHED DEB: " + i
                os.unlink(i)
            #print "DOWNLOADING URL: " + url + "\n";
            urlretrieve(url, pkgs_path + '/' + os.path.basename(url))
            debs_to_unpack.append(pkgs_path + '/' + os.path.basename(url))

        # unpack packages
        if verbose:
            print('Extracting downloaded debs...')
        for i in fetcher.items:
            if not permanent_rootdir or os.path.getctime(
                    i.destfile) > last_written:
                if os.path.isfile(i.destfile):
                    subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
            real_pkgs.remove(os.path.basename(i.destfile).split('_', 1)[0])

        for debfile in debs_to_unpack:
            #print "UNPACKING NEW DOWNLOADED DEB: " + debfile + " IN: " + rootdir
            subprocess.check_call(['dpkg', '-x', debfile, rootdir])

        if tmp_aptroot:
            shutil.rmtree(aptroot)

        # check bookkeeping that apt fetcher really got everything
        assert not real_pkgs, 'apt fetcher did not fetch these packages: ' \
            + ' '.join(real_pkgs)

        if permanent_rootdir:
            self._save_virtual_mapping(aptroot)

        return obsolete
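
The Zentyal-specific additions in this variant probe fixed repository URLs for
exact-version debug packages before relying on apt's candidates. As an
illustrative example of the URL shape built in the loop above, libtalloc2
version 2.1.5-2 (source package talloc) would be probed at:

    http://archive.ubuntu.com/ubuntu/pool/main/t/talloc/libtalloc2-dbg_2.1.5-2_amd64.deb

The re.sub('^.*:', '', ver) call strips any epoch prefix (such as '1:') so the
filename matches what the pool actually serves.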
Example #3
    def install_packages(self, rootdir, configdir, release, packages,
                         verbose=False, cache_dir=None,
                         permanent_rootdir=False, architecture=None):
        '''Install packages into a sandbox (for apport-retrace).

        In order to work without any special permissions and without touching
        the running system, this should only download and unpack packages into
        the given root directory, not install them into the system.

        configdir points to a directory with by-release configuration files for
        the packaging system; this is completely dependent on the backend
        implementation, the only assumption is that this looks into
        configdir/release/, so that you can use retracing for multiple
        DistroReleases. As a special case, if configdir is None, it uses the
        current system configuration, and "release" is ignored.

        release is the value of the report's 'DistroRelease' field.

        packages is a list of ('packagename', 'version') tuples. If the version
        is None, it should install the most current available version.

        If cache_dir is given, then the downloaded packages will be stored
        there, to speed up subsequent retraces.

        If permanent_rootdir is True, then the sandbox created from the
        downloaded packages will be reused, to speed up subsequent retraces.

        If architecture is given, the sandbox will be created with packages of
        the given architecture (as specified in a report's "Architecture"
        field). If not given it defaults to the host system's architecture.

        Return a string with outdated packages, or None if all packages were
        installed.

        If something is wrong with the environment (invalid configuration,
        package servers down, etc.), this should raise a SystemError with a
        meaningful error message.
        '''
        if not configdir:
            apt_sources = '/etc/apt/sources.list'
            self.current_release_codename = self.get_distro_codename()
        else:
            # support architecture specific config, fall back to global config
            apt_sources = os.path.join(configdir, release, 'sources.list')
            if architecture:
                arch_apt_sources = os.path.join(configdir, release,
                                                architecture, 'sources.list')
                if os.path.exists(arch_apt_sources):
                    apt_sources = arch_apt_sources

            # set mirror for get_file_package()
            try:
                self.set_mirror(self._get_primary_mirror_from_apt_sources(apt_sources))
            except SystemError as e:
                apport.warning('cannot determine mirror: %s' % str(e))

            # set current release code name for _distro_release_to_codename
            with open(os.path.join(configdir, release, 'codename')) as f:
                self.current_release_codename = f.read().strip()

        if not os.path.exists(apt_sources):
            raise SystemError('%s does not exist' % apt_sources)

        # create apt sandbox
        if cache_dir:
            tmp_aptroot = False
            if configdir:
                aptroot = os.path.join(cache_dir, release, 'apt')
            else:
                aptroot = os.path.join(cache_dir, 'system', 'apt')
            if not os.path.isdir(aptroot):
                os.makedirs(aptroot)
        else:
            tmp_aptroot = True
            aptroot = tempfile.mkdtemp()

        if architecture:
            apt.apt_pkg.config.set('APT::Architecture', architecture)
        else:
            apt.apt_pkg.config.set('APT::Architecture', self.get_system_architecture())

        if verbose:
            fetchProgress = apt.progress.text.AcquireProgress()
        else:
            fetchProgress = apt.progress.base.AcquireProgress()
        if not tmp_aptroot:
            c = self._sandbox_cache(aptroot, apt_sources, fetchProgress)
        else:
            self._build_apt_sandbox(aptroot, apt_sources)
            c = apt.Cache(rootdir=os.path.abspath(aptroot))
            try:
                c.update(fetchProgress)
            except apt.cache.FetchFailedException as e:
                raise SystemError(str(e))
            c.open()

        obsolete = ''

        src_records = apt.apt_pkg.SourceRecords()

        # mark packages for installation
        real_pkgs = set()
        for (pkg, ver) in packages:
            try:
                candidate = c[pkg].candidate
            except KeyError:
                candidate = None
            if not candidate:
                m = 'package %s does not exist, ignoring' % pkg.replace('%', '%%')
                obsolete += m + '\n'
                apport.warning(m)
                continue

            if ver and candidate.version != ver:
                w = '%s version %s required, but %s is available' % (pkg, ver, candidate.version)
                obsolete += w + '\n'
            real_pkgs.add(pkg)

            if permanent_rootdir:
                virtual_mapping = self._virtual_mapping(aptroot)
                # Remember all the virtual packages that this package provides,
                # so that if we encounter that virtual package as a
                # Conflicts/Replaces later, we know to remove this package from
                # the cache.
                for p in candidate.provides:
                    virtual_mapping.setdefault(p, set()).add(pkg)
                conflicts = []
                if 'Conflicts' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(candidate.record['Conflicts'])
                if 'Replaces' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(candidate.record['Replaces'])
                archives = apt.apt_pkg.config.find_dir('Dir::Cache::archives')
                for conflict in conflicts:
                    # apt_pkg.parse_depends needs to handle the or operator,
                    # but as policy states it is invalid to use that in
                    # Replaces/Depends, we can safely choose the first value
                    # here.
                    conflict = conflict[0]
                    if c.is_virtual_package(conflict[0]):
                        try:
                            providers = virtual_mapping[conflict[0]]
                        except KeyError:
                            # We may not have seen the virtual package that
                            # this conflicts with, so we can assume it's not
                            # unpacked into the sandbox.
                            continue
                        for p in providers:
                            debs = os.path.join(archives, '%s_*.deb' % p)
                            for path in glob.glob(debs):
                                ver = self._deb_version(path)
                                if apt.apt_pkg.check_dep(ver, conflict[2], conflict[1]):
                                    os.unlink(path)
                        del providers
                    else:
                        debs = os.path.join(archives, '%s_*.deb' % conflict[0])
                        for path in glob.glob(debs):
                            ver = self._deb_version(path)
                            if apt.apt_pkg.check_dep(ver, conflict[2], conflict[1]):
                                os.unlink(path)

            if candidate.architecture != 'all':
                if pkg + '-dbg' in c:
                    real_pkgs.add(pkg + '-dbg')
                else:
                    # install all -dbg from the source package
                    if src_records.lookup(candidate.source_name):
                        dbgs = [p for p in src_records.binaries if p.endswith('-dbg') and p in c]
                    else:
                        dbgs = []
                    if dbgs:
                        for p in dbgs:
                            real_pkgs.add(p)
                    else:
                        if pkg + '-dbgsym' in c:
                            real_pkgs.add(pkg + '-dbgsym')
                            if c[pkg + '-dbgsym'].candidate.version != candidate.version:
                                obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
                                    pkg, candidate.version, c[pkg + '-dbgsym'].candidate.version)

        for p in real_pkgs:
            c[p].mark_install(False, False)

        last_written = time.time()
        # fetch packages
        fetcher = apt.apt_pkg.Acquire(fetchProgress)
        try:
            c.fetch_archives(fetcher=fetcher)
        except apt.cache.FetchFailedException as e:
            apport.error('Package download error, try again later: %s', str(e))
            sys.exit(99)  # transient error

        # unpack packages
        if verbose:
            print('Extracting downloaded debs...')
        for i in fetcher.items:
            if not permanent_rootdir or os.path.getctime(i.destfile) > last_written:
                subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
            real_pkgs.remove(os.path.basename(i.destfile).split('_', 1)[0])

        if tmp_aptroot:
            shutil.rmtree(aptroot)

        # check bookkeeping that apt fetcher really got everything
        assert not real_pkgs, 'apt fetcher did not fetch these packages: ' \
            + ' '.join(real_pkgs)

        if permanent_rootdir:
            self._save_virtual_mapping(aptroot)

        return obsolete
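
Compared with Example #1, this variant adds permanent_rootdir and architecture.
A sketch of a call that reuses a permanent sandbox for i386 reports (all paths
and the version string are hypothetical):

    outdated = impl.install_packages(
        '/srv/sandboxes/trusty-i386',  # unpacked tree, kept across retraces
        '/srv/retrace-config',
        'Ubuntu 14.04',
        [('bash', '4.3-7ubuntu1')],
        cache_dir='/var/cache/retrace',
        permanent_rootdir=True,        # reuse sandbox, prune conflicting debs
        architecture='i386')           # from the report's Architecture field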
Example #4
def make_sandbox(report,
                 config_dir,
                 cache_dir=None,
                 sandbox_dir=None,
                 extra_packages=[],
                 verbose=False,
                 log_timestamps=False):
    '''Build a sandbox with the packages that belong to a particular report.

    This downloads and unpacks all packages from the report's Package and
    Dependencies fields, plus all packages that ship the files from ProcMaps
    (often, runtime plugins do not appear in Dependencies), plus optionally
    some extra ones, for the distro release and architecture of the report.

    For unpackaged executables, there are no Dependencies. Packages for shared
    libraries are unpacked.

    report is an apport.Report object to build a sandbox for. Presence of the
    Package field determines whether to determine dependencies through
    packaging (via the optional report['Dependencies'] field), or through ldd
    via needed_runtime_packages() -> shared_libraries().  Usually
    report['Architecture'] and report['Uname'] are present.

    config_dir points to a directory with by-release configuration files for
    the packaging system, or "system"; this is passed to
    apport.packaging.install_packages(), see that method for details.

    cache_dir points to a directory where the downloaded packages and debug
    symbols are kept, which is useful if you create sandboxes very often. If
    not given, the downloaded packages get deleted at program exit.

    sandbox_dir points to a directory with a permanently unpacked sandbox with
    the already unpacked packages. This speeds up operations even further if
    you need to create sandboxes for different reports very often; but the
    sandboxes can become very big over time, and you must ensure that an
    already existing sandbox matches the DistroRelease: and Architecture: of
    report. If not given, a temporary directory will be created which gets
    deleted at program exit.

    extra_packages can specify a list of additional packages to install which
    are not derived from the report.

    If verbose is True (False by default), this will write some additional
    logging to stdout. If log_timestamps is True, these log messages will be
    prefixed with the current time.

    Return a tuple (sandbox_dir, cache_dir, outdated_msg).
    '''
    # sandbox
    if sandbox_dir:
        sandbox_dir = os.path.abspath(sandbox_dir)
        if not os.path.isdir(sandbox_dir):
            os.makedirs(sandbox_dir)
        permanent_rootdir = True
    else:
        sandbox_dir = tempfile.mkdtemp(prefix='apport_sandbox_')
        atexit.register(shutil.rmtree, sandbox_dir)
        permanent_rootdir = False

    # cache
    if cache_dir:
        cache_dir = os.path.abspath(cache_dir)
    else:
        cache_dir = tempfile.mkdtemp(prefix='apport_cache_')
        atexit.register(shutil.rmtree, cache_dir)

    pkgs = []

    # when ProcMaps is available and we don't have any third-party packages, it
    # is enough to get the libraries in it and map their files to packages;
    # otherwise, get Package/Dependencies
    if 'ProcMaps' not in report or '[origin' in (
            report.get('Package', '') + report.get('Dependencies', '')):
        pkgs = needed_packages(report)

    # add user-specified extra packages, if any
    for p in extra_packages:
        pkgs.append((p, None))
    if config_dir == 'system':
        config_dir = None

    # unpack packages, if any, using cache and sandbox
    try:
        outdated_msg = apport.packaging.install_packages(
            sandbox_dir,
            config_dir,
            report['DistroRelease'],
            pkgs,
            verbose,
            cache_dir,
            permanent_rootdir,
            architecture=report.get('Architecture'))
    except SystemError as e:
        sys.stderr.write(str(e) + '\n')
        sys.exit(1)

    pkgs = needed_runtime_packages(report, sandbox_dir, cache_dir, verbose)

    # package hooks might reassign Package:, check that we have the originally
    # crashing binary
    for path in ('InterpreterPath', 'ExecutablePath'):
        if path in report and not os.path.exists(sandbox_dir + report[path]):
            pkg = apport.packaging.get_file_package(
                report[path],
                True,
                cache_dir,
                release=report['DistroRelease'],
                arch=report.get('Architecture'))
            if pkg:
                apport.log(
                    'Installing extra package %s to get %s' % (pkg, path),
                    log_timestamps)
                pkgs.append((pkg, None))
            else:
                apport.warning('Cannot find package which ships %s', path)

    # unpack packages for executable using cache and sandbox
    if pkgs:
        try:
            outdated_msg += apport.packaging.install_packages(
                sandbox_dir,
                config_dir,
                report['DistroRelease'],
                pkgs,
                cache_dir=cache_dir,
                architecture=report.get('Architecture'))
        except SystemError as e:
            sys.stderr.write(str(e) + '\n')
            sys.exit(1)

    # sanity check: for a packaged binary we require having the executable in
    the sandbox; TODO: for an unpackaged binary we don't currently copy its
    # potential local library dependencies (like those in build trees) into the
    # sandbox, and we call gdb/valgrind on the binary outside the sandbox.
    if 'Package' in report:
        for path in ('InterpreterPath', 'ExecutablePath'):
            if path in report and not os.path.exists(sandbox_dir +
                                                     report[path]):
                apport.error(
                    '%s %s does not exist (report specified package %s)', path,
                    sandbox_dir + report[path], report['Package'])
                sys.exit(0)

    if outdated_msg:
        report['RetraceOutdatedPackages'] = outdated_msg

    apport.memdbg('built sandbox')

    return sandbox_dir, cache_dir, outdated_msg
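
A sketch of driving make_sandbox() from a retracing tool (the crash file path
and cache directory are hypothetical; apport.Report and its load() method are
part of apport's public API):

    import apport

    report = apport.Report()
    with open('/var/crash/_usr_bin_foo.1000.crash', 'rb') as f:
        report.load(f)

    # 'system' means: use the running system's packaging configuration.
    sandbox_dir, cache_dir, outdated_msg = make_sandbox(
        report, 'system',
        cache_dir='/var/cache/retrace',  # persistent cache across runs
        verbose=True)
    if outdated_msg:
        print('Some packages were outdated:\n' + outdated_msg)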
Example #5
File: report.py Project: YECharles/Peach
    def add_hooks_info(self, ui, package=None, srcpackage=None):
        '''Run hook script for collecting package specific data.

        A hook script needs to be in _hook_dir/<Package>.py or in
        _common_hook_dir/*.py and has to contain a function 'add_info(report,
        ui)' that takes and modifies a Report, and gets a UserInterface
        reference for interactivity.

        Return True if the hook requested to stop the report filing process,
        False otherwise.
        '''
        symb = {}

        # common hooks
        for hook in glob.glob(_common_hook_dir + '/*.py'):
            try:
                with open(hook) as fd:
                    exec(compile(fd.read(), hook, 'exec'), symb)
                try:
                    symb['add_info'](self, ui)
                except TypeError as e:
                    if str(e).startswith('add_info()'):
                        # older versions of apport did not pass UI, and hooks that
                        # do not require it don't need to take it
                        symb['add_info'](self)
                    else:
                        raise
            except StopIteration:
                return True
            except:
                apport.error('hook %s crashed:', hook)
                traceback.print_exc()
                pass

        # binary package hook
        #if not package:
        #    package = self.get('Package')
        #if package:
        #    hook = '%s/%s.py' % (_hook_dir, package.split()[0])
        #    if os.path.exists(hook):
        #        try:
        #            with open(hook) as fd:
        #                exec(compile(fd.read(), hook, 'exec'), symb)
        #            try:
        #                symb['add_info'](self, ui)
        #            except TypeError as e:
        #                if str(e).startswith('add_info()'):
        #                    # older versions of apport did not pass UI, and hooks that
        #                    # do not require it don't need to take it
        #                    symb['add_info'](self)
        #                else:
        #                    raise
        #        except StopIteration:
        #            return True
        #        except:
        #            apport.error('hook %s crashed:', hook)
        #            traceback.print_exc()
        #            pass

        # source package hook
        #if not srcpackage:
        #    srcpackage = self.get('SourcePackage')
        #if srcpackage:
        #    hook = '%s/source_%s.py' % (_hook_dir, srcpackage.split()[0])
        #    if os.path.exists(hook):
        #        try:
        #            with open(hook) as fd:
        #                exec(compile(fd.read(), hook, 'exec'), symb)
        #            try:
        #                symb['add_info'](self, ui)
        #            except TypeError as e:
        #                if str(e).startswith('add_info()'):
        #                    # older versions of apport did not pass UI, and hooks that
        #                    # do not require it don't need to take it
        #                    symb['add_info'](self)
        #                else:
        #                    raise
        #        except StopIteration:
        #            return True
        #        except:
        #            apport.error('hook %s crashed:', hook)
        #            traceback.print_exc()
        #            pass

        return False
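
For reference, a minimal hook of the kind this method executes (a sketch: the
report field and question text are invented; /usr/share/apport/general-hooks
is apport's usual _common_hook_dir):

    # /usr/share/apport/general-hooks/example.py
    def add_info(report, ui):
        '''Collect extra data; raising StopIteration cancels report filing.'''
        report['ExampleLog'] = 'collected diagnostic data'
        if ui and not ui.yesno('Include extra diagnostics in the report?'):
            raise StopIteration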
Example #6
    def install_packages(self, rootdir, configdir, release, packages,
                         verbose=False, cache_dir=None,
                         permanent_rootdir=False, architecture=None):
        '''Install packages into a sandbox (for apport-retrace).

        In order to work without any special permissions and without touching
        the running system, this should only download and unpack packages into
        the given root directory, not install them into the system.

        configdir points to a directory with by-release configuration files for
        the packaging system; this is completely dependent on the backend
        implementation, the only assumption is that this looks into
        configdir/release/, so that you can use retracing for multiple
        DistroReleases. As a special case, if configdir is None, it uses the
        current system configuration, and "release" is ignored.

        release is the value of the report's 'DistroRelease' field.

        packages is a list of ('packagename', 'version') tuples. If the version
        is None, it should install the most current available version.

        If cache_dir is given, then the downloaded packages will be stored
        there, to speed up subsequent retraces.

        If permanent_rootdir is True, then the sandbox created from the
        downloaded packages will be reused, to speed up subsequent retraces.

        If architecture is given, the sandbox will be created with packages of
        the given architecture (as specified in a report's "Architecture"
        field). If not given it defaults to the host system's architecture.

        Return a string with outdated packages, or None if all packages were
        installed.

        If something is wrong with the environment (invalid configuration,
        package servers down, etc.), this should raise a SystemError with a
        meaningful error message.
        '''
        if not configdir:
            apt_sources = '/etc/apt/sources.list'
            self.current_release_codename = self.get_distro_codename()
        else:
            # support architecture specific config, fall back to global config
            apt_sources = os.path.join(configdir, release, 'sources.list')
            if architecture:
                arch_apt_sources = os.path.join(configdir, release,
                                                architecture, 'sources.list')
                if os.path.exists(arch_apt_sources):
                    apt_sources = arch_apt_sources

            # set mirror for get_file_package()
            try:
                self.set_mirror(self._get_primary_mirror_from_apt_sources(apt_sources))
            except SystemError as e:
                apport.warning('cannot determine mirror: %s' % str(e))

            # set current release code name for _distro_release_to_codename
            with open(os.path.join(configdir, release, 'codename')) as f:
                self.current_release_codename = f.read().strip()

        if not os.path.exists(apt_sources):
            raise SystemError('%s does not exist' % apt_sources)

        # create apt sandbox
        if cache_dir:
            tmp_aptroot = False
            if configdir:
                aptroot = os.path.join(cache_dir, release, 'apt')
            else:
                aptroot = os.path.join(cache_dir, 'system', 'apt')
            if not os.path.isdir(aptroot):
                os.makedirs(aptroot)
        else:
            tmp_aptroot = True
            aptroot = tempfile.mkdtemp()

        if architecture:
            apt.apt_pkg.config.set('APT::Architecture', architecture)
        else:
            apt.apt_pkg.config.set('APT::Architecture', self.get_system_architecture())

        if verbose:
            fetchProgress = apt.progress.text.AcquireProgress()
        else:
            fetchProgress = apt.progress.base.AcquireProgress()
        if not tmp_aptroot:
            c = self._sandbox_cache(aptroot, apt_sources, fetchProgress)
        else:
            self._build_apt_sandbox(aptroot, apt_sources)
            c = apt.Cache(rootdir=os.path.abspath(aptroot))
            try:
                c.update(fetchProgress)
            except apt.cache.FetchFailedException as e:
                raise SystemError(str(e))
            c.open()

        obsolete = ''

        src_records = apt.apt_pkg.SourceRecords()

        REPO = 'http://archive.zentyal.org/old'
        UBUNTU_REPO = 'http://archive.ubuntu.com/ubuntu'
        DBG_PKGS = {'libldb1': 'ldb',
                    'libsope1': 'sope',
                    'libc6': 'eglibc',
                    'libwbclient0': 'samba',
                    'libwbxml2-0': 'wbxml2',
                    'openchangeserver': 'openchange',
                    'openchangeproxy': 'openchange',
                    'libmapi0': 'openchange',
                    'libmapistore0': 'openchange',
                    'libmapiproxy0': 'openchange',
                    'libnss-winbind': 'samba',
                    'libpam-winbind': 'samba',
                    'python-ldb': 'ldb',
                    'python-talloc': 'talloc',
                    'samba': 'samba',
                    'samba-common-bin': 'samba',
                    'samba-dsdb-modules': 'samba',
                    'samba-libs': 'samba',
                    'samba-testsuite': 'samba',
                    'samba-vfs-modules': 'samba',
                    'sogo': 'sogo',
                    'sogo-openchange': 'sogo',
                    'libtalloc2': 'talloc',
                    'libtevent0': 'tevent',
                    'libntdb1': 'ntdb',
                    'winbind': 'samba'}

        # urls to overwrite dbg packages with proper version
        dbg_pkg_urls = {}
        pkgs_not_found = []

        # mark packages for installation
        real_pkgs = set()
        for (pkg, ver) in packages:
            if not ver:
                continue
            if pkg in DBG_PKGS.keys():
                source = DBG_PKGS[pkg]
                for pname in [pkg, pkg + '-dbg']:
                    for repo in (REPO, UBUNTU_REPO):
                        filename = pname + '_' + re.sub('^.*:', '', ver) + '_amd64.deb'
                        url = repo + '/pool/main/' + source[0:1] + '/' + source + '/' + filename
                        if requests.head(url).status_code == 200:
                            dbg_pkg_urls[pname] = url
                            break
                        elif repo == UBUNTU_REPO and pname[-4:] != '-dbg':
                            pkgs_not_found.append(filename)

            try:
                candidate = c[pkg].candidate
            except KeyError:
                candidate = None
            if not candidate:
                m = 'package %s does not exist, ignoring' % pkg.replace('%', '%%')
                obsolete += m + '\n'
                apport.warning(m)
                continue

            if ver and candidate.version != ver:
                w = '%s version %s required, but %s is available' % (pkg, ver, candidate.version)
                obsolete += w + '\n'
            real_pkgs.add(pkg)

            if permanent_rootdir:
                virtual_mapping = self._virtual_mapping(aptroot)
                # Remember all the virtual packages that this package provides,
                # so that if we encounter that virtual package as a
                # Conflicts/Replaces later, we know to remove this package from
                # the cache.
                for p in candidate.provides:
                    virtual_mapping.setdefault(p, set()).add(pkg)
                conflicts = []
                if 'Conflicts' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(candidate.record['Conflicts'])
                if 'Replaces' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(candidate.record['Replaces'])
                archives = apt.apt_pkg.config.find_dir('Dir::Cache::archives')
                for conflict in conflicts:
                    # apt_pkg.parse_depends needs to handle the or operator,
                    # but as policy states it is invalid to use that in
                    # Replaces/Depends, we can safely choose the first value
                    # here.
                    conflict = conflict[0]
                    if c.is_virtual_package(conflict[0]):
                        try:
                            providers = virtual_mapping[conflict[0]]
                        except KeyError:
                            # We may not have seen the virtual package that
                            # this conflicts with, so we can assume it's not
                            # unpacked into the sandbox.
                            continue
                        for p in providers:
                            debs = os.path.join(archives, '%s_*.deb' % p)
                            for path in glob.glob(debs):
                                ver = self._deb_version(path)
                                if apt.apt_pkg.check_dep(ver, conflict[2], conflict[1]):
                                    os.unlink(path)
                        del providers
                    else:
                        debs = os.path.join(archives, '%s_*.deb' % conflict[0])
                        for path in glob.glob(debs):
                            ver = self._deb_version(path)
                            if apt.apt_pkg.check_dep(ver, conflict[2], conflict[1]):
                                os.unlink(path)

            if candidate.architecture != 'all':
                if pkg + '-dbg' in c:
                    real_pkgs.add(pkg + '-dbg')
                else:
                    # install all -dbg from the source package
                    if src_records.lookup(candidate.source_name):
                        dbgs = [p for p in src_records.binaries if p.endswith('-dbg') and p in c]
                    else:
                        dbgs = []
                    if dbgs:
                        for p in dbgs:
                            real_pkgs.add(p)
                    else:
                        if pkg + '-dbgsym' in c:
                            real_pkgs.add(pkg + '-dbgsym')
                            if c[pkg + '-dbgsym'].candidate.version != candidate.version:
                                obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
                                    pkg, candidate.version, c[pkg + '-dbgsym'].candidate.version)

        if pkgs_not_found:
            print "Aborting retrace as some packages cannot be found in the repos:"
            print "\n".join(pkgs_not_found)
            sys.exit(1)

        for p in real_pkgs:
            c[p].mark_install(False, False)

        last_written = time.time()
        # fetch packages
        fetcher = apt.apt_pkg.Acquire(fetchProgress)
        try:
            c.fetch_archives(fetcher=fetcher)
        except apt.cache.FetchFailedException as e:
            apport.error('Package download error, try again later: %s', str(e))
            sys.exit(99)  # transient error

        # FIXME: unhardcode path
        pkgs_path = cache_dir + '/Ubuntu 14.04/apt/var/cache/apt/archives'
        debs_to_unpack = []
        for pname, url in dbg_pkg_urls.items():
            # Remove other versions before downloading the proper one
            for i in glob.glob(os.path.join(pkgs_path, '%s_*.deb' % pname)):
                #print "DELETING AUTO-FETCHED DEB: " + i
                os.unlink(i)
            #print "DOWNLOADING URL: " + url + "\n";
            urlretrieve(url, pkgs_path + '/' + os.path.basename(url))
            debs_to_unpack.append(pkgs_path + '/' + os.path.basename(url))

        # unpack packages
        if verbose:
            print('Extracting downloaded debs...')
        for i in fetcher.items:
            if not permanent_rootdir or os.path.getctime(i.destfile) > last_written:
                if os.path.isfile(i.destfile):
                    subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
            real_pkgs.remove(os.path.basename(i.destfile).split('_', 1)[0])

        for debfile in debs_to_unpack:
            #print "UNPACKING NEW DOWNLOADED DEB: " + debfile + " IN: " + rootdir
            subprocess.check_call(['dpkg', '-x', debfile, rootdir])

        if tmp_aptroot:
            shutil.rmtree(aptroot)

        # check bookkeeping that apt fetcher really got everything
        assert not real_pkgs, 'apt fetcher did not fetch these packages: ' \
            + ' '.join(real_pkgs)

        if permanent_rootdir:
            self._save_virtual_mapping(aptroot)

        return obsolete
Example #7
def make_sandbox(report, config_dir, cache_dir=None, sandbox_dir=None,
                 extra_packages=[], verbose=False, log_timestamps=False):
    '''Build a sandbox with the packages that belong to a particular report.

    This downloads and unpacks all packages from the report's Package and
    Dependencies fields, plus all packages that ship the files from ProcMaps
    (often, runtime plugins do not appear in Dependencies), plus optionally
    some extra ones, for the distro release and architecture of the report.

    For unpackaged executables, there are no Dependencies. Packages for shared
    libraries are unpacked.

    report is an apport.Report object to build a sandbox for. Presence of the
    Package field determines whether to determine dependencies through
    packaging (via the optional report['Dependencies'] field), or through ldd
    via needed_runtime_packages() -> shared_libraries().  Usually
    report['Architecture'] and report['Uname'] are present.

    config_dir points to a directory with by-release configuration files for
    the packaging system, or "system"; this is passed to
    apport.packaging.install_packages(), see that method for details.

    cache_dir points to a directory where the downloaded packages and debug
    symbols are kept, which is useful if you create sandboxes very often. If
    not given, the downloaded packages get deleted at program exit.

    sandbox_dir points to a directory with a permanently unpacked sandbox with
    the already unpacked packages. This speeds up operations even further if
    you need to create sandboxes for different reports very often; but the
    sandboxes can become very big over time, and you must ensure that an
    already existing sandbox matches the DistroRelease: and Architecture: of
    report. If not given, a temporary directory will be created which gets
    deleted at program exit.

    extra_packages can specify a list of additional packages to install which
    are not derived from the report.

    If verbose is True (False by default), this will write some additional
    logging to stdout. If log_timestamps is True, these log messages will be
    prefixed with the current time.

    Return a tuple (sandbox_dir, cache_dir, outdated_msg).
    '''
    # sandbox
    if sandbox_dir:
        sandbox_dir = os.path.abspath(sandbox_dir)
        if not os.path.isdir(sandbox_dir):
            os.makedirs(sandbox_dir)
        permanent_rootdir = True
    else:
        sandbox_dir = tempfile.mkdtemp(prefix='apport_sandbox_')
        atexit.register(shutil.rmtree, sandbox_dir)
        permanent_rootdir = False

    # cache
    if cache_dir:
        cache_dir = os.path.abspath(cache_dir)
    else:
        cache_dir = tempfile.mkdtemp(prefix='apport_cache_')
        atexit.register(shutil.rmtree, cache_dir)

    pkgs = []

    # when ProcMaps is available and we don't have any third-party packages, it
    # is enough to get the libraries in it and map their files to packages;
    # otherwise, get Package/Dependencies
    if 'ProcMaps' not in report or '[origin' in (report.get('Package', '') + report.get('Dependencies', '')):
        pkgs = needed_packages(report)

    # add user-specified extra packages, if any
    for p in extra_packages:
        pkgs.append((p, None))
    if config_dir == 'system':
        config_dir = None

    # unpack packages, if any, using cache and sandbox
    try:
        outdated_msg = apport.packaging.install_packages(
            sandbox_dir, config_dir, report['DistroRelease'], pkgs,
            verbose, cache_dir, permanent_rootdir,
            architecture=report.get('Architecture'))
    except SystemError as e:
        sys.stderr.write(str(e) + '\n')
        sys.exit(1)

    pkgs = needed_runtime_packages(report, sandbox_dir, cache_dir, verbose)

    # package hooks might reassign Package:, check that we have the originally
    # crashing binary
    for path in ('InterpreterPath', 'ExecutablePath'):
        if path in report and not os.path.exists(sandbox_dir + report[path]):
            pkg = apport.packaging.get_file_package(report[path], True, cache_dir,
                                                    release=report['DistroRelease'],
                                                    arch=report.get('Architecture'))
            if pkg:
                apport.log('Installing extra package %s to get %s' % (pkg, path), log_timestamps)
                pkgs.append((pkg, None))
            else:
                apport.warning('Cannot find package which ships %s', path)

    # unpack packages for executable using cache and sandbox
    if pkgs:
        try:
            outdated_msg += apport.packaging.install_packages(
                sandbox_dir, config_dir, report['DistroRelease'], pkgs,
                cache_dir=cache_dir, architecture=report.get('Architecture'))
        except SystemError as e:
            sys.stderr.write(str(e) + '\n')
            sys.exit(1)

    # sanity check: for a packaged binary we require having the executable in
    the sandbox; TODO: for an unpackaged binary we don't currently copy its
    # potential local library dependencies (like those in build trees) into the
    # sandbox, and we call gdb/valgrind on the binary outside the sandbox.
    if 'Package' in report:
        for path in ('InterpreterPath', 'ExecutablePath'):
            if path in report and not os.path.exists(sandbox_dir + report[path]):
                apport.error('%s %s does not exist (report specified package %s)',
                             path, sandbox_dir + report[path], report['Package'])
                sys.exit(0)

    if outdated_msg:
        report['RetraceOutdatedPackages'] = outdated_msg

    apport.memdbg('built sandbox')

    return sandbox_dir, cache_dir, outdated_msg
Example #8
    def install_packages(self, rootdir, configdir, release, packages,
            verbose=False, cache_dir=None):
        '''Install packages into a sandbox (for apport-retrace).

        In order to work without any special permissions and without touching
        the running system, this should only download and unpack packages into
        the given root directory, not install them into the system.

        configdir points to a directory with by-release configuration files for
        the packaging system; this is completely dependent on the backend
        implementation, the only assumption is that this looks into
        configdir/release/, so that you can use retracing for multiple
        DistroReleases. As a special case, if configdir is None, it uses the
        current system configuration, and "release" is ignored.

        release is the value of the report's 'DistroRelease' field.

        packages is a list of ('packagename', 'version') tuples. If the version
        is None, it should install the most current available version.
        
        If cache_dir is given, then the downloaded packages will be stored
        there, to speed up subsequent retraces.

        Return a string with outdated packages, or None if all packages were
        installed.
        '''
        if not configdir:
            apt_sources = '/etc/apt/sources.list'
        else:
            apt_sources = os.path.join(configdir, release, 'sources.list')
        if not os.path.exists(apt_sources):
            raise SystemError('%s does not exist' % apt_sources)

        # create apt sandbox
        if cache_dir:
            tmp_aptroot = False
            if configdir:
                aptroot = os.path.join(cache_dir, release, 'apt')
            else:
                aptroot = os.path.join(cache_dir, 'system', 'apt')
            try:
                os.makedirs(aptroot)
            except OSError:
                pass
        else:
            tmp_aptroot = True
            aptroot = tempfile.mkdtemp()

        self._build_apt_sandbox(aptroot, apt_sources)

        if verbose:
            fetchProgress = apt.progress.TextFetchProgress()
        else:
            fetchProgress = apt.progress.FetchProgress()
        c = apt.Cache()
        c.update(fetchProgress)
        c = apt.Cache()

        obsolete = ''

        # mark packages for installation
        for (pkg, ver) in packages:
            try:
                candidate = c[pkg].candidate
            except KeyError:
                candidate = None
            if not candidate:
                m = 'package %s does not exist, ignoring' % pkg
                obsolete += m + '\n'
                apport.warning(m)
                continue

            if ver and candidate.version != ver:
                w = '%s version %s required, but %s is available' % (pkg, ver, candidate.version)
                obsolete += w + '\n'
            c[pkg].mark_install(False, False)

            if candidate.architecture != 'all':
                if c.has_key(pkg + '-dbg'):
                    c[pkg + '-dbg'].mark_install(False, False)
                elif c.has_key(pkg + '-dbgsym'):
                    c[pkg + '-dbgsym'].mark_install(False, False)
                    if c[pkg + '-dbgsym'].candidate.version != candidate.version:
                        obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
                                pkg, candidate.version, c[pkg + '-dbgsym'].candidate.version)


        # fetch packages
        fetcher = apt.apt_pkg.GetAcquire(fetchProgress)
        pm = apt.apt_pkg.GetPackageManager(c._depcache)
        try:
            res = c._fetchArchives(fetcher, pm)
        except apt.cache.FetchFailedException as e:
            apport.error('Package download error, try again later: %s', str(e))
            sys.exit(99) # transient error

        # unpack packages
        if verbose:
            print('Extracting downloaded debs...')
        for i in fetcher.Items:
            subprocess.check_call(['dpkg', '-x', i.DestFile, rootdir])

        if tmp_aptroot:
            shutil.rmtree(aptroot)

        # reset config
        apt.apt_pkg.init_config()

        return obsolete
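
This oldest variant targets the pre-0.8 python-apt API: its TextFetchProgress
and FetchProgress classes, apt_pkg.GetAcquire and GetPackageManager calls,
Cache.has_key() and Cache._fetchArchives() correspond to
apt.progress.text.AcquireProgress / apt.progress.base.AcquireProgress,
apt_pkg.Acquire, the 'in' operator and Cache.fetch_archives() in Example #1.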