Example #1
def needed_runtime_packages(report,
                            sandbox,
                            pkgmap_cache_dir,
                            pkg_versions,
                            verbose=False):
    '''Determine necessary runtime packages for given report.

    This determines libraries dynamically loaded at runtime in two cases:
    1. The executable has already run: /proc/pid/maps is used, from the report
    2. The executable has not already run: shared_libraries() is used

    The libraries are resolved to the packages that installed them.

    Return list of (pkgname, version) pairs; version is looked up in
    pkg_versions and may be None.

    When pkgmap_cache_dir is specified, it is used as a cache for
    get_file_package().
    '''
    # check list of libraries that the crashed process referenced at
    # runtime and warn about those which are not available
    pkgs = set()
    libs = set()
    if 'ProcMaps' in report:
        for l in report['ProcMaps'].splitlines():
            if not l.strip():
                continue
            cols = l.split()
            if len(cols) == 6 and 'x' in cols[1] and '.so' in cols[5]:
                lib = os.path.realpath(cols[5])
                libs.add(lib)
    else:
        # 'ProcMaps' key is absent in apport-valgrind use case
        libs = apport.fileutils.shared_libraries(
            report['ExecutablePath']).values()
    if not os.path.exists(pkgmap_cache_dir):
        os.makedirs(pkgmap_cache_dir)

    # grab as much as we can
    for l in libs:
        pkg = apport.packaging.get_file_package(
            l,
            True,
            pkgmap_cache_dir,
            release=report['DistroRelease'],
            arch=report.get('Architecture'))
        if pkg:
            if verbose:
                apport.log('dynamically loaded %s needs package %s, queueing' %
                           (l, pkg))
            pkgs.add(pkg)
        else:
            apport.warning('%s is needed, but cannot be mapped to a package',
                           l)

    return [(p, pkg_versions.get(p)) for p in pkgs]
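A minimal usage sketch for needed_runtime_packages(), assuming a report loaded from a crash file; the crash file name and the sandbox/cache paths are illustrative:

import apport

report = apport.Report()
with open('/var/crash/_usr_bin_foo.1000.crash', 'rb') as f:  # illustrative path
    report.load(f)

pkg_versions = {}  # as produced by report_package_versions(), see Example #4
pkgs = needed_runtime_packages(report, '/tmp/sandbox', '/tmp/pkgmap-cache',
                               pkg_versions, verbose=True)
for name, version in pkgs:
    print('%s %s' % (name, version or '(unknown)'))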
Example #2
    def get_modified_files(self, package):
        '''Return list of all modified files of a package.'''

        # get the maximum mtime of package files that we consider unmodified
        listfile = '/var/lib/dpkg/info/%s:%s.list' % (
            package, self.get_system_architecture())
        if not os.path.exists(listfile):
            listfile = '/var/lib/dpkg/info/%s.list' % package
        try:
            s = os.stat(listfile)
            if not stat.S_ISREG(s.st_mode):
                raise OSError
            max_time = max(s.st_mtime, s.st_ctime)
        except OSError:
            return []

        # create a list of files with a newer timestamp for md5sum'ing
        sums = b''
        sumfile = '/var/lib/dpkg/info/%s:%s.md5sums' % (
            package, self.get_system_architecture())
        if not os.path.exists(sumfile):
            sumfile = '/var/lib/dpkg/info/%s.md5sums' % package
            if not os.path.exists(sumfile):
                # some packages do not ship md5sums
                return []

        with open(sumfile, 'rb') as fd:
            for line in fd:
                try:
                    # ignore lines with NUL bytes (happens, LP#96050)
                    if b'\0' in line:
                        apport.warning(
                            '%s contains NUL character, ignoring line',
                            sumfile)
                        continue
                    words = line.split()
                    if not words:
                        apport.warning('%s contains empty line, ignoring line',
                                       sumfile)
                        continue
                    s = os.stat(
                        ('/' + words[-1].decode('UTF-8')).encode('UTF-8'))
                    if max(s.st_mtime, s.st_ctime) <= max_time:
                        continue
                except OSError:
                    pass

                sums += line

        if sums:
            return self._check_files_md5(sums)
        else:
            return []
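A hedged usage sketch for get_modified_files(); it assumes the method is reachable through the packaging backend instance that apport exposes as apport.packaging:

import apport.packaging

# list files of the installed 'coreutils' package whose md5sum no longer
# matches the checksum shipped by the package
for f in apport.packaging.get_modified_files('coreutils'):
    print(f)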
Example #3
def needed_runtime_packages(report, sandbox, cache_dir, verbose=False):
    '''Determine necessary runtime packages for given report.

    This determines libraries dynamically loaded at runtime in two cases:
    1. The executable has already run: /proc/pid/maps is used, from the report
    2. The executable has not already run: shared_libraries() is used

    The libraries are resolved to the packages that installed them.

    Return list of (pkgname, None) pairs.

    When cache_dir is specified, it is used as a cache for get_file_package().
    '''
    # check list of libraries that the crashed process referenced at
    # runtime and warn about those which are not available
    pkgs = set()
    libs = set()
    if 'ProcMaps' in report:
        for l in report['ProcMaps'].splitlines():
            if not l.strip():
                continue
            cols = l.split()
            if len(cols) == 6 and 'x' in cols[1] and '.so' in cols[5]:
                lib = os.path.realpath(cols[5])
                libs.add(lib)
    else:
        # 'ProcMaps' key is absent in apport-valgrind use case
        libs = apport.fileutils.shared_libraries(report['ExecutablePath']).values()
    if sandbox:
        cache_dir = os.path.join(cache_dir, report['DistroRelease'])
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)

    # grab as much as we can
    for l in libs:
        if os.path.exists(sandbox + l):
            continue

        pkg = apport.packaging.get_file_package(l, True, cache_dir,
                                                release=report['DistroRelease'],
                                                arch=report.get('Architecture'))
        if pkg:
            if verbose:
                apport.log('dynamically loaded %s needs package %s, queueing' % (l, pkg))
            pkgs.add(pkg)
        else:
            apport.warning('%s is needed, but cannot be mapped to a package', l)

    return [(p, None) for p in pkgs]
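For context, a sketch of the /proc/<pid>/maps lines the loop above filters: a mapping is collected only if the line has six columns, the permission column contains 'x', and the path column names a .so file. The sample line is illustrative:

line = ('7f2e8a1c3000-7f2e8a37d000 r-xp 00000000 08:01 1048602 '
        '/lib/x86_64-linux-gnu/libc-2.19.so')
cols = line.split()
print(len(cols) == 6 and 'x' in cols[1] and '.so' in cols[5])  # True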
Example #4
def report_package_versions(report):
    '''Return package -> version dictionary from report'''

    pkg_vers = {}
    for l in (report.get('Package', '') + '\n' + report.get('Dependencies', '')).splitlines():
        if not l.strip():
            continue
        try:
            (pkg, version) = l.split()[:2]
        except ValueError:
            apport.warning('invalid Package/Dependencies line: %s', l)
            # invalid line, ignore
            continue
        pkg_vers[pkg] = version

    return pkg_vers
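The Package and Dependencies report fields hold one 'name version' pair per line, so the parser above reduces them to a dictionary; a small self-contained example with illustrative values:

report = {
    'Package': 'bash 4.3-7ubuntu1',
    'Dependencies': 'libc6 2.19-0ubuntu6\nlibtinfo5 5.9+20140118-1ubuntu1',
}
print(report_package_versions(report))
# {'bash': '4.3-7ubuntu1', 'libc6': '2.19-0ubuntu6',
#  'libtinfo5': '5.9+20140118-1ubuntu1'}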
Example #5
    def get_modified_files(self, package):
        '''Return list of all modified files of a package.'''

        # get the maximum mtime of package files that we consider unmodified
        listfile = '/var/lib/dpkg/info/%s:%s.list' % (package, self.get_system_architecture())
        if not os.path.exists(listfile):
            listfile = '/var/lib/dpkg/info/%s.list' % package
        try:
            s = os.stat(listfile)
            if not stat.S_ISREG(s.st_mode):
                raise OSError
            max_time = max(s.st_mtime, s.st_ctime)
        except OSError:
            return []

        # create a list of files with a newer timestamp for md5sum'ing
        sums = ''
        sumfile = '/var/lib/dpkg/info/%s:%s.md5sums' % (package, self.get_system_architecture())
        if not os.path.exists(sumfile):
            sumfile = '/var/lib/dpkg/info/%s.md5sums' % package
            if not os.path.exists(sumfile):
                # some packages do not ship md5sums
                return []

        with open(sumfile) as fd:
            for line in fd:
                try:
                    # ignore lines with NUL bytes (happens, LP#96050)
                    if '\0' in line:
                        apport.warning('%s contains NUL character, ignoring line', sumfile)
                        continue
                    words = line.split()
                    if not words:
                        apport.warning('%s contains empty line, ignoring line', sumfile)
                        continue
                    s = os.stat('/' + words[-1])
                    if max(s.st_mtime, s.st_ctime) <= max_time:
                        continue
                except OSError:
                    pass

                sums += line

        if sums:
            return self._check_files_md5(sums)
        else:
            return []
Example #6
def needed_packages(report):
    '''Determine necessary packages for given report.

    Return list of (pkgname, version) pairs. version might be None for unknown
    package versions.
    '''
    pkgs = {}

    # first, grab the versions that we captured at crash time
    for l in (report.get('Package', '') + '\n' + report.get('Dependencies', '')).splitlines():
        if not l.strip():
            continue
        try:
            (pkg, version) = l.split()[:2]
        except ValueError:
            apport.warning('invalid Package/Dependencies line: %s', l)
            # invalid line, ignore
            continue
        pkgs[pkg] = version

    return [(p, v) for (p, v) in pkgs.items()]
Example #7
    def install_packages(self, rootdir, configdir, release, packages,
                         verbose=False, cache_dir=None):
        '''Install packages into a sandbox (for apport-retrace).

        In order to work without any special permissions and without touching
        the running system, this should only download and unpack packages into
        the given root directory, not install them into the system.

        configdir points to a directory with by-release configuration files for
        the packaging system; this is completely dependent on the backend
        implementation, the only assumption is that this looks into
        configdir/release/, so that you can use retracing for multiple
        DistroReleases. As a special case, if configdir is None, it uses the
        current system configuration, and "release" is ignored.

        release is the value of the report's 'DistroRelease' field.

        packages is a list of ('packagename', 'version') tuples. If the version
        is None, it should install the most current available version.
        
        If cache_dir is given, then the downloaded packages will be stored
        there, to speed up subsequent retraces.

        Return a string with outdated packages, or an empty string if all
        packages were installed at the required versions.

        If something is wrong with the environment (invalid configuration,
        package servers down, etc.), this should raise a SystemError with a
        meaningful error message.
        '''
        if not configdir:
            apt_sources = '/etc/apt/sources.list'
        else:
            apt_sources = os.path.join(configdir, release, 'sources.list')
        if not os.path.exists(apt_sources):
            raise SystemError('%s does not exist' % apt_sources)

        # create apt sandbox
        if cache_dir:
            tmp_aptroot = False
            if configdir:
                aptroot = os.path.join(cache_dir, release, 'apt')
            else:
                aptroot = os.path.join(cache_dir, 'system', 'apt')
            try:
                os.makedirs(aptroot)
            except OSError:
                pass
        else:
            tmp_aptroot = True
            aptroot = tempfile.mkdtemp()

        self._build_apt_sandbox(aptroot, apt_sources)

        if verbose:
            fetchProgress = apt.progress.text.AcquireProgress()
        else:
            fetchProgress = apt.progress.base.AcquireProgress()
        c = apt.Cache(rootdir=os.path.abspath(aptroot))
        try:
            c.update(fetchProgress)
        except apt.cache.FetchFailedException as e:
            raise SystemError(str(e))
        c.open()

        obsolete = ''

        # mark packages for installation
        real_pkgs = set()
        for (pkg, ver) in packages:
            try:
                candidate = c[pkg].candidate
            except KeyError:
                candidate = None
            if not candidate:
                m = 'package %s does not exist, ignoring' % pkg.replace('%', '%%')
                obsolete += m + '\n'
                apport.warning(m)
                continue

            if ver and candidate.version != ver:
                w = '%s version %s required, but %s is available' % (pkg, ver, candidate.version)
                obsolete += w + '\n'
            real_pkgs.add(pkg)

            if candidate.architecture != 'all':
                if pkg + '-dbg' in c:
                    real_pkgs.add(pkg + '-dbg')
                elif pkg + '-dbgsym' in c:
                    real_pkgs.add(pkg + '-dbgsym')
                    if c[pkg + '-dbgsym'].candidate.version != candidate.version:
                        obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
                                pkg, candidate.version, c[pkg + '-dbgsym'].candidate.version)

        for p in real_pkgs:
            c[p].mark_install(False, False)

        # fetch packages
        fetcher = apt.apt_pkg.Acquire(fetchProgress)
        try:
            c.fetch_archives(fetcher=fetcher)
        except apt.cache.FetchFailedException as e:
            apport.error('Package download error, try again later: %s', str(e))
            sys.exit(99)  # transient error

        # unpack packages
        if verbose:
            print('Extracting downloaded debs...')
        for i in fetcher.items:
            subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
            real_pkgs.remove(os.path.basename(i.destfile).split('_', 1)[0])

        if tmp_aptroot:
            shutil.rmtree(aptroot)

        # check bookkeeping that apt fetcher really got everything
        assert not real_pkgs, 'apt fetcher did not fetch these packages: ' \
            + ' '.join(real_pkgs)

        # work around python-apt bug that causes parts of the Cache(rootdir=)
        # argument configuration to be persistent; this resets the apt
        # configuration to system defaults again
        apt.Cache(rootdir='/')
        self._apt_cache = None

        return obsolete
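A hedged call sketch for install_packages(); here impl stands for the packaging backend instance, and the directories, release, and package list are illustrative:

outdated = impl.install_packages(
    '/tmp/sandbox',         # rootdir: packages are unpacked here
    None,                   # configdir: None -> use the system apt config
    'Ubuntu 14.04',         # release: ignored when configdir is None
    [('coreutils', None)],  # version None -> newest available
    verbose=True,
    cache_dir='/tmp/apport-cache')
if outdated:
    print(outdated)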
Example #8
    def install_packages(self, rootdir, configdir, release, packages,
                         verbose=False, cache_dir=None,
                         permanent_rootdir=False, architecture=None):
        '''Install packages into a sandbox (for apport-retrace).

        In order to work without any special permissions and without touching
        the running system, this should only download and unpack packages into
        the given root directory, not install them into the system.

        configdir points to a directory with by-release configuration files for
        the packaging system; this is completely dependent on the backend
        implementation, the only assumption is that this looks into
        configdir/release/, so that you can use retracing for multiple
        DistroReleases. As a special case, if configdir is None, it uses the
        current system configuration, and "release" is ignored.

        release is the value of the report's 'DistroRelease' field.

        packages is a list of ('packagename', 'version') tuples. If the version
        is None, it should install the most current available version.

        If cache_dir is given, then the downloaded packages will be stored
        there, to speed up subsequent retraces.

        If permanent_rootdir is True, then the sandbox created from the
        downloaded packages will be reused, to speed up subsequent retraces.

        If architecture is given, the sandbox will be created with packages of
        the given architecture (as specified in a report's "Architecture"
        field). If not given it defaults to the host system's architecture.

        Return a string with outdated packages, or an empty string if all
        packages were installed at the required versions.

        If something is wrong with the environment (invalid configuration,
        package servers down, etc.), this should raise a SystemError with a
        meaningful error message.
        '''
        if not configdir:
            apt_sources = '/etc/apt/sources.list'
            self.current_release_codename = self.get_distro_codename()
        else:
            # support architecture specific config, fall back to global config
            apt_sources = os.path.join(configdir, release, 'sources.list')
            if architecture:
                arch_apt_sources = os.path.join(configdir, release,
                                                architecture, 'sources.list')
                if os.path.exists(arch_apt_sources):
                    apt_sources = arch_apt_sources

            # set mirror for get_file_package()
            try:
                self.set_mirror(self._get_primary_mirror_from_apt_sources(apt_sources))
            except SystemError as e:
                apport.warning('cannot determine mirror: %s' % str(e))

            # set current release code name for _distro_release_to_codename
            with open(os.path.join(configdir, release, 'codename')) as f:
                self.current_release_codename = f.read().strip()

        if not os.path.exists(apt_sources):
            raise SystemError('%s does not exist' % apt_sources)

        # create apt sandbox
        if cache_dir:
            tmp_aptroot = False
            if configdir:
                aptroot = os.path.join(cache_dir, release, 'apt')
            else:
                aptroot = os.path.join(cache_dir, 'system', 'apt')
            if not os.path.isdir(aptroot):
                os.makedirs(aptroot)
        else:
            tmp_aptroot = True
            aptroot = tempfile.mkdtemp()

        if architecture:
            apt.apt_pkg.config.set('APT::Architecture', architecture)
        else:
            apt.apt_pkg.config.set('APT::Architecture', self.get_system_architecture())

        if verbose:
            fetchProgress = apt.progress.text.AcquireProgress()
        else:
            fetchProgress = apt.progress.base.AcquireProgress()
        if not tmp_aptroot:
            c = self._sandbox_cache(aptroot, apt_sources, fetchProgress)
        else:
            self._build_apt_sandbox(aptroot, apt_sources)
            c = apt.Cache(rootdir=os.path.abspath(aptroot))
            try:
                c.update(fetchProgress)
            except apt.cache.FetchFailedException as e:
                raise SystemError(str(e))
            c.open()

        obsolete = ''

        src_records = apt.apt_pkg.SourceRecords()

        # mark packages for installation
        real_pkgs = set()
        for (pkg, ver) in packages:
            try:
                candidate = c[pkg].candidate
            except KeyError:
                candidate = None
            if not candidate:
                m = 'package %s does not exist, ignoring' % pkg.replace('%', '%%')
                obsolete += m + '\n'
                apport.warning(m)
                continue

            if ver and candidate.version != ver:
                w = '%s version %s required, but %s is available' % (pkg, ver, candidate.version)
                obsolete += w + '\n'
            real_pkgs.add(pkg)

            if permanent_rootdir:
                virtual_mapping = self._virtual_mapping(aptroot)
                # Remember all the virtual packages that this package provides,
                # so that if we encounter that virtual package as a
                # Conflicts/Replaces later, we know to remove this package from
                # the cache.
                for p in candidate.provides:
                    virtual_mapping.setdefault(p, set()).add(pkg)
                conflicts = []
                if 'Conflicts' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(candidate.record['Conflicts'])
                if 'Replaces' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(candidate.record['Replaces'])
                archives = apt.apt_pkg.config.find_dir('Dir::Cache::archives')
                for conflict in conflicts:
                    # apt_pkg.parse_depends needs to handle the or operator,
                    # but as policy states it is invalid to use that in
                    # Replaces/Depends, we can safely choose the first value
                    # here.
                    conflict = conflict[0]
                    if c.is_virtual_package(conflict[0]):
                        try:
                            providers = virtual_mapping[conflict[0]]
                        except KeyError:
                            # We may not have seen the virtual package that
                            # this conflicts with, so we can assume it's not
                            # unpacked into the sandbox.
                            continue
                        for p in providers:
                            debs = os.path.join(archives, '%s_*.deb' % p)
                            for path in glob.glob(debs):
                                ver = self._deb_version(path)
                                if apt.apt_pkg.check_dep(ver, conflict[2], conflict[1]):
                                    os.unlink(path)
                        del providers
                    else:
                        debs = os.path.join(archives, '%s_*.deb' % conflict[0])
                        for path in glob.glob(debs):
                            ver = self._deb_version(path)
                            if apt.apt_pkg.check_dep(ver, conflict[2], conflict[1]):
                                os.unlink(path)

            if candidate.architecture != 'all':
                if pkg + '-dbg' in c:
                    real_pkgs.add(pkg + '-dbg')
                else:
                    # install all -dbg from the source package
                    if src_records.lookup(candidate.source_name):
                        dbgs = [p for p in src_records.binaries if p.endswith('-dbg') and p in c]
                    else:
                        dbgs = []
                    if dbgs:
                        for p in dbgs:
                            real_pkgs.add(p)
                    else:
                        if pkg + '-dbgsym' in c:
                            real_pkgs.add(pkg + '-dbgsym')
                            if c[pkg + '-dbgsym'].candidate.version != candidate.version:
                                obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
                                    pkg, candidate.version, c[pkg + '-dbgsym'].candidate.version)

        for p in real_pkgs:
            c[p].mark_install(False, False)

        last_written = time.time()
        # fetch packages
        fetcher = apt.apt_pkg.Acquire(fetchProgress)
        try:
            c.fetch_archives(fetcher=fetcher)
        except apt.cache.FetchFailedException as e:
            apport.error('Package download error, try again later: %s', str(e))
            sys.exit(99)  # transient error

        # unpack packages
        if verbose:
            print('Extracting downloaded debs...')
        for i in fetcher.items:
            if not permanent_rootdir or os.path.getctime(i.destfile) > last_written:
                subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
            real_pkgs.remove(os.path.basename(i.destfile).split('_', 1)[0])

        if tmp_aptroot:
            shutil.rmtree(aptroot)

        # check bookkeeping that apt fetcher really got everything
        assert not real_pkgs, 'apt fetcher did not fetch these packages: ' \
            + ' '.join(real_pkgs)

        if permanent_rootdir:
            self._save_virtual_mapping(aptroot)

        return obsolete
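For reference, a sketch of the python-apt data shapes that the Conflicts/Replaces pruning above relies on; this reflects my reading of the apt_pkg API and is worth verifying against the installed python-apt version:

import apt_pkg

apt_pkg.init()
groups = apt_pkg.parse_depends('foo (<< 1.2), bar')
# a list of or-groups of (name, version, op) tuples, e.g.
# [[('foo', '1.2', '<')], [('bar', '', '')]]
name, version, op = groups[0][0]
print(apt_pkg.check_dep('1.1-1', op, version))  # True: 1.1-1 satisfies << 1.2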
Example #9
    def install_packages(self,
                         rootdir,
                         configdir,
                         release,
                         packages,
                         verbose=False,
                         cache_dir=None,
                         permanent_rootdir=False,
                         architecture=None):
        '''Install packages into a sandbox (for apport-retrace).

        In order to work without any special permissions and without touching
        the running system, this should only download and unpack packages into
        the given root directory, not install them into the system.

        configdir points to a directory with by-release configuration files for
        the packaging system; this is completely dependent on the backend
        implementation, the only assumption is that this looks into
        configdir/release/, so that you can use retracing for multiple
        DistroReleases. As a special case, if configdir is None, it uses the
        current system configuration, and "release" is ignored.

        release is the value of the report's 'DistroRelease' field.

        packages is a list of ('packagename', 'version') tuples. If the version
        is None, it should install the most current available version.

        If cache_dir is given, then the downloaded packages will be stored
        there, to speed up subsequent retraces.

        If permanent_rootdir is True, then the sandbox created from the
        downloaded packages will be reused, to speed up subsequent retraces.

        If architecture is given, the sandbox will be created with packages of
        the given architecture (as specified in a report's "Architecture"
        field). If not given it defaults to the host system's architecture.

        Return a string with outdated packages, or an empty string if all
        packages were installed at the required versions.

        If something is wrong with the environment (invalid configuration,
        package servers down, etc.), this should raise a SystemError with a
        meaningful error message.
        '''
        if not configdir:
            apt_sources = '/etc/apt/sources.list'
            self.current_release_codename = self.get_distro_codename()
        else:
            # support architecture specific config, fall back to global config
            apt_sources = os.path.join(configdir, release, 'sources.list')
            if architecture:
                arch_apt_sources = os.path.join(configdir, release,
                                                architecture, 'sources.list')
                if os.path.exists(arch_apt_sources):
                    apt_sources = arch_apt_sources

            # set mirror for get_file_package()
            try:
                self.set_mirror(
                    self._get_primary_mirror_from_apt_sources(apt_sources))
            except SystemError as e:
                apport.warning('cannot determine mirror: %s' % str(e))

            # set current release code name for _distro_release_to_codename
            with open(os.path.join(configdir, release, 'codename')) as f:
                self.current_release_codename = f.read().strip()

        if not os.path.exists(apt_sources):
            raise SystemError('%s does not exist' % apt_sources)

        # create apt sandbox
        if cache_dir:
            tmp_aptroot = False
            if configdir:
                aptroot = os.path.join(cache_dir, release, 'apt')
            else:
                aptroot = os.path.join(cache_dir, 'system', 'apt')
            if not os.path.isdir(aptroot):
                os.makedirs(aptroot)
        else:
            tmp_aptroot = True
            aptroot = tempfile.mkdtemp()

        if architecture:
            apt.apt_pkg.config.set('APT::Architecture', architecture)
        else:
            apt.apt_pkg.config.set('APT::Architecture',
                                   self.get_system_architecture())

        if verbose:
            fetchProgress = apt.progress.text.AcquireProgress()
        else:
            fetchProgress = apt.progress.base.AcquireProgress()
        if not tmp_aptroot:
            c = self._sandbox_cache(aptroot, apt_sources, fetchProgress)
        else:
            self._build_apt_sandbox(aptroot, apt_sources)
            c = apt.Cache(rootdir=os.path.abspath(aptroot))
            try:
                c.update(fetchProgress)
            except apt.cache.FetchFailedException as e:
                raise SystemError(str(e))
            c.open()

        obsolete = ''

        src_records = apt.apt_pkg.SourceRecords()

        REPO = 'http://archive.zentyal.org/old'
        UBUNTU_REPO = 'http://archive.ubuntu.com/ubuntu'
        DBG_PKGS = {
            'libldb1': 'ldb',
            'libsope1': 'sope',
            'libc6': 'eglibc',
            'libwbxml2-0': 'wbxml2',
            'openchangeserver': 'openchange',
            'openchangeproxy': 'openchange',
            'libmapi0': 'openchange',
            'libmapistore0': 'openchange',
            'libmapiproxy0': 'openchange',
            'python-ldb': 'ldb',
            'samba': 'samba',
            'sogo': 'sogo',
            'sogo-openchange': 'sogo',
            'libtalloc2': 'talloc',
            'libtevent0': 'tevent',
            'libntdb1': 'ntdb'
        }

        # urls to overwrite dbg packages with proper version
        dbg_pkg_urls = {}
        pkgs_not_found = []

        # mark packages for installation
        real_pkgs = set()
        for (pkg, ver) in packages:
            if not ver:
                continue
            if pkg in DBG_PKGS.keys():
                source = DBG_PKGS[pkg]
                for pname in [pkg, pkg + '-dbg']:
                    for repo in (REPO, UBUNTU_REPO):
                        filename = pname + '_' + re.sub('^.*:', '',
                                                        ver) + '_amd64.deb'
                        url = repo + '/pool/main/' + source[
                            0:1] + '/' + source + '/' + filename
                        if requests.head(url).status_code == 200:
                            dbg_pkg_urls[pname] = url
                            break
                        elif repo == UBUNTU_REPO and pname[-4:] != '-dbg':
                            pkgs_not_found.append(filename)

            try:
                candidate = c[pkg].candidate
            except KeyError:
                candidate = None
            if not candidate:
                m = 'package %s does not exist, ignoring' % pkg.replace(
                    '%', '%%')
                obsolete += m + '\n'
                apport.warning(m)
                continue

            if ver and candidate.version != ver:
                w = '%s version %s required, but %s is available' % (
                    pkg, ver, candidate.version)
                obsolete += w + '\n'
            real_pkgs.add(pkg)

            if permanent_rootdir:
                virtual_mapping = self._virtual_mapping(aptroot)
                # Remember all the virtual packages that this package provides,
                # so that if we encounter that virtual package as a
                # Conflicts/Replaces later, we know to remove this package from
                # the cache.
                for p in candidate.provides:
                    virtual_mapping.setdefault(p, set()).add(pkg)
                conflicts = []
                if 'Conflicts' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(
                        candidate.record['Conflicts'])
                if 'Replaces' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(
                        candidate.record['Replaces'])
                archives = apt.apt_pkg.config.find_dir('Dir::Cache::archives')
                for conflict in conflicts:
                    # apt_pkg.parse_depends needs to handle the or operator,
                    # but as policy states it is invalid to use that in
                    # Replaces/Depends, we can safely choose the first value
                    # here.
                    conflict = conflict[0]
                    if c.is_virtual_package(conflict[0]):
                        try:
                            providers = virtual_mapping[conflict[0]]
                        except KeyError:
                            # We may not have seen the virtual package that
                            # this conflicts with, so we can assume it's not
                            # unpacked into the sandbox.
                            continue
                        for p in providers:
                            debs = os.path.join(archives, '%s_*.deb' % p)
                            for path in glob.glob(debs):
                                ver = self._deb_version(path)
                                if apt.apt_pkg.check_dep(
                                        ver, conflict[2], conflict[1]):
                                    os.unlink(path)
                        del providers
                    else:
                        debs = os.path.join(archives, '%s_*.deb' % conflict[0])
                        for path in glob.glob(debs):
                            ver = self._deb_version(path)
                            if apt.apt_pkg.check_dep(ver, conflict[2],
                                                     conflict[1]):
                                os.unlink(path)

            if candidate.architecture != 'all':
                if pkg + '-dbg' in c:
                    real_pkgs.add(pkg + '-dbg')
                else:
                    # install all -dbg from the source package
                    if src_records.lookup(candidate.source_name):
                        dbgs = [
                            p for p in src_records.binaries
                            if p.endswith('-dbg') and p in c
                        ]
                    else:
                        dbgs = []
                    if dbgs:
                        for p in dbgs:
                            real_pkgs.add(p)
                    else:
                        if pkg + '-dbgsym' in c:
                            real_pkgs.add(pkg + '-dbgsym')
                            if c[pkg +
                                 '-dbgsym'].candidate.version != candidate.version:
                                obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
                                    pkg, candidate.version,
                                    c[pkg + '-dbgsym'].candidate.version)

        if pkgs_not_found:
            print('Aborting retrace as some packages cannot be found '
                  'in the repos:')
            print('\n'.join(pkgs_not_found))
            sys.exit(1)

        for p in real_pkgs:
            c[p].mark_install(False, False)

        last_written = time.time()
        # fetch packages
        fetcher = apt.apt_pkg.Acquire(fetchProgress)
        try:
            c.fetch_archives(fetcher=fetcher)
        except apt.cache.FetchFailedException as e:
            apport.error('Package download error, try again later: %s', str(e))
            sys.exit(99)  # transient error

        # FIXME: unhardcode path
        pkgs_path = cache_dir + '/Ubuntu 14.04/apt/var/cache/apt/archives'
        debs_to_unpack = []
        for pname, url in dbg_pkg_urls.items():
            # Remove other versions before downloading the proper one
            for i in glob.glob(os.path.join(pkgs_path, '%s_*.deb' % pname)):
                #print "DELETING AUTO-FETCHED DEB: " + i
                os.unlink(i)
            #print "DOWNLOADING URL: " + url + "\n";
            urlretrieve(url, pkgs_path + '/' + os.path.basename(url))
            debs_to_unpack.append(pkgs_path + '/' + os.path.basename(url))

        # unpack packages
        if verbose:
            print('Extracting downloaded debs...')
        for i in fetcher.items:
            if not permanent_rootdir or os.path.getctime(
                    i.destfile) > last_written:
                if os.path.isfile(i.destfile):
                    subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
            real_pkgs.remove(os.path.basename(i.destfile).split('_', 1)[0])

        for debfile in debs_to_unpack:
            #print "UNPACKING NEW DOWNLOADED DEB: " + debfile + " IN: " + rootdir
            subprocess.check_call(['dpkg', '-x', debfile, rootdir])

        if tmp_aptroot:
            shutil.rmtree(aptroot)

        # check bookkeeping that apt fetcher really got everything
        assert not real_pkgs, 'apt fetcher did not fetch these packages: ' \
            + ' '.join(real_pkgs)

        if permanent_rootdir:
            self._save_virtual_mapping(aptroot)

        return obsolete
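The URL probe above follows the Debian pool layout; a sketch with the same logic factored into a helper (the helper name is my own, not part of the original code):

import re
import requests

def deb_pool_url(repo, source, pname, ver, arch='amd64'):
    # epochs ('1:2.3-4') never appear in .deb file names, so strip them
    filename = '%s_%s_%s.deb' % (pname, re.sub('^.*:', '', ver), arch)
    return '%s/pool/main/%s/%s/%s' % (repo, source[0], source, filename)

url = deb_pool_url('http://archive.ubuntu.com/ubuntu', 'ldb', 'libldb1',
                   '1:1.1.16-1')
print(requests.head(url).status_code)  # 200 if the archive ships that .deb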
Example #10
def make_sandbox(report,
                 config_dir,
                 cache_dir=None,
                 sandbox_dir=None,
                 extra_packages=[],
                 verbose=False,
                 log_timestamps=False):
    '''Build a sandbox with the packages that belong to a particular report.

    This downloads and unpacks all packages from the report's Package and
    Dependencies fields, plus all packages that ship the files from ProcMaps
    (often, runtime plugins do not appear in Dependencies), plus optionally
    some extra ones, for the distro release and architecture of the report.

    For unpackaged executables, there are no Dependencies. Packages for shared
    libraries are unpacked.

    report is an apport.Report object to build a sandbox for. Presence of the
    Package field determines whether to determine dependencies through
    packaging (via the optional report['Dependencies'] field), or through ldd
    via needed_runtime_packages() -> shared_libraries().  Usually
    report['Architecture'] and report['Uname'] are present.

    config_dir points to a directory with by-release configuration files for
    the packaging system, or "system"; this is passed to
    apport.packaging.install_packages(), see that method for details.

    cache_dir points to a directory where the downloaded packages and debug
    symbols are kept, which is useful if you create sandboxes very often. If
    not given, the downloaded packages get deleted at program exit.

    sandbox_dir points to a directory with a permanently unpacked sandbox with
    the already unpacked packages. This speeds up operations even further if
    you need to create sandboxes for different reports very often; but the
    sandboxes can become very big over time, and you must ensure that an
    already existing sandbox matches the DistroRelease: and Architecture: of
    report. If not given, a temporary directory will be created which gets
    deleted at program exit.

    extra_packages can specify a list of additional packages to install which
    are not derived from the report.

    If verbose is True (False by default), this will write some additional
    logging to stdout. If log_timestamps is True, these log messages will be
    prefixed with the current time.

    Return a tuple (sandbox_dir, cache_dir, outdated_msg).
    '''
    # sandbox
    if sandbox_dir:
        sandbox_dir = os.path.abspath(sandbox_dir)
        if not os.path.isdir(sandbox_dir):
            os.makedirs(sandbox_dir)
        permanent_rootdir = True
    else:
        sandbox_dir = tempfile.mkdtemp(prefix='apport_sandbox_')
        atexit.register(shutil.rmtree, sandbox_dir)
        permanent_rootdir = False

    # cache
    if cache_dir:
        cache_dir = os.path.abspath(cache_dir)
    else:
        cache_dir = tempfile.mkdtemp(prefix='apport_cache_')
        atexit.register(shutil.rmtree, cache_dir)

    pkgs = []

    # when ProcMaps is available and we don't have any third-party packages, it
    # is enough to get the libraries in it and map their files to packages;
    # otherwise, get Package/Dependencies
    if 'ProcMaps' not in report or '[origin' in (
            report.get('Package', '') + report.get('Dependencies', '')):
        pkgs = needed_packages(report)

    # add user-specified extra packages, if any
    for p in extra_packages:
        pkgs.append((p, None))
    if config_dir == 'system':
        config_dir = None

    # unpack packages, if any, using cache and sandbox
    try:
        outdated_msg = apport.packaging.install_packages(
            sandbox_dir,
            config_dir,
            report['DistroRelease'],
            pkgs,
            verbose,
            cache_dir,
            permanent_rootdir,
            architecture=report.get('Architecture'))
    except SystemError as e:
        sys.stderr.write(str(e) + '\n')
        sys.exit(1)

    pkgs = needed_runtime_packages(report, sandbox_dir, cache_dir, verbose)

    # package hooks might reassign Package:, check that we have the originally
    # crashing binary
    for path in ('InterpreterPath', 'ExecutablePath'):
        if path in report and not os.path.exists(sandbox_dir + report[path]):
            pkg = apport.packaging.get_file_package(
                report[path],
                True,
                cache_dir,
                release=report['DistroRelease'],
                arch=report.get('Architecture'))
            if pkg:
                apport.log(
                    'Installing extra package %s to get %s' % (pkg, path),
                    log_timestamps)
                pkgs.append((pkg, None))
            else:
                apport.warning('Cannot find package which ships %s', path)

    # unpack packages for executable using cache and sandbox
    if pkgs:
        try:
            outdated_msg += apport.packaging.install_packages(
                sandbox_dir,
                config_dir,
                report['DistroRelease'],
                pkgs,
                cache_dir=cache_dir,
                architecture=report.get('Architecture'))
        except SystemError as e:
            sys.stderr.write(str(e) + '\n')
            sys.exit(1)

    # sanity check: for a packaged binary we require having the executable in
    # the sandbox; TODO: for an unpackaged binary we don't currently copy its
    # potential local library dependencies (like those in build trees) into the
    # sandbox, and we call gdb/valgrind on the binary outside the sandbox.
    if 'Package' in report:
        for path in ('InterpreterPath', 'ExecutablePath'):
            if path in report and not os.path.exists(sandbox_dir +
                                                     report[path]):
                apport.error(
                    '%s %s does not exist (report specified package %s)', path,
                    sandbox_dir + report[path], report['Package'])
                sys.exit(0)

    if outdated_msg:
        report['RetraceOutdatedPackages'] = outdated_msg

    apport.memdbg('built sandbox')

    return sandbox_dir, cache_dir, outdated_msg
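A hedged end-to-end sketch of how a retracer might drive make_sandbox(); the crash file path is illustrative and the gdb invocation is simplified:

import subprocess
import apport

report = apport.Report()
with open('/var/crash/_usr_bin_foo.1000.crash', 'rb') as f:  # illustrative
    report.load(f)

sandbox_dir, cache_dir, outdated = make_sandbox(
    report, 'system', cache_dir='/tmp/apport-cache', verbose=True)

# point gdb at the libraries and executable unpacked into the sandbox
subprocess.call(['gdb',
                 '-ex', 'set solib-absolute-prefix ' + sandbox_dir,
                 sandbox_dir + report['ExecutablePath']])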
Example #11
    def _check_crash_report(self, main_file):
        '''Check that we have one crash report, and verify its contents'''

        reports = apport.fileutils.get_new_reports()
        self.assertEqual(len(reports), 1, 'did not create a crash report')
        r = apport.Report()
        with open(reports[0]) as f:
            r.load(f)
        self.assertEqual(r['ProblemType'], 'Crash')
        self.assertTrue(r['ProcCmdline'].startswith('java -classpath'))
        self.assertTrue(r['StackTrace'].startswith(
            "java.lang.RuntimeException: Can't catch this"))
        if '.jar!' in main_file:
            self.assertEqual(r['MainClassUrl'], 'jar:file:' + main_file)
        else:
            self.assertEqual(r['MainClassUrl'], 'file:' + main_file)
        self.assertTrue('DistroRelease' in r)
        self.assertTrue('ProcCwd' in r)

#
# main
#

try:
    subprocess.check_call(['java', '-version'], stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
except OSError:
    apport.warning('Java not available, skipping')
    sys.exit(0)

unittest.main()
Example #12
    def install_packages(self, rootdir, configdir, release, packages,
                         verbose=False, cache_dir=None,
                         permanent_rootdir=False, architecture=None):
        '''Install packages into a sandbox (for apport-retrace).

        In order to work without any special permissions and without touching
        the running system, this should only download and unpack packages into
        the given root directory, not install them into the system.

        configdir points to a directory with by-release configuration files for
        the packaging system; this is completely dependent on the backend
        implementation, the only assumption is that this looks into
        configdir/release/, so that you can use retracing for multiple
        DistroReleases. As a special case, if configdir is None, it uses the
        current system configuration, and "release" is ignored.

        release is the value of the report's 'DistroRelease' field.

        packages is a list of ('packagename', 'version') tuples. If the version
        is None, it should install the most current available version.

        If cache_dir is given, then the downloaded packages will be stored
        there, to speed up subsequent retraces.

        If permanent_rootdir is True, then the sandbox created from the
        downloaded packages will be reused, to speed up subsequent retraces.

        If architecture is given, the sandbox will be created with packages of
        the given architecture (as specified in a report's "Architecture"
        field). If not given it defaults to the host system's architecture.

        Return a string with outdated packages, or an empty string if all
        packages were installed at the required versions.

        If something is wrong with the environment (invalid configuration,
        package servers down, etc.), this should raise a SystemError with a
        meaningful error message.
        '''
        if not configdir:
            apt_sources = '/etc/apt/sources.list'
            self.current_release_codename = self.get_distro_codename()
        else:
            # support architecture specific config, fall back to global config
            apt_sources = os.path.join(configdir, release, 'sources.list')
            if architecture:
                arch_apt_sources = os.path.join(configdir, release,
                                                architecture, 'sources.list')
                if os.path.exists(arch_apt_sources):
                    apt_sources = arch_apt_sources

            # set mirror for get_file_package()
            try:
                self.set_mirror(self._get_primary_mirror_from_apt_sources(apt_sources))
            except SystemError as e:
                apport.warning('cannot determine mirror: %s' % str(e))

            # set current release code name for _distro_release_to_codename
            with open(os.path.join(configdir, release, 'codename')) as f:
                self.current_release_codename = f.read().strip()

        if not os.path.exists(apt_sources):
            raise SystemError('%s does not exist' % apt_sources)

        # create apt sandbox
        if cache_dir:
            tmp_aptroot = False
            if configdir:
                aptroot = os.path.join(cache_dir, release, 'apt')
            else:
                aptroot = os.path.join(cache_dir, 'system', 'apt')
            if not os.path.isdir(aptroot):
                os.makedirs(aptroot)
        else:
            tmp_aptroot = True
            aptroot = tempfile.mkdtemp()

        if architecture:
            apt.apt_pkg.config.set('APT::Architecture', architecture)
        else:
            apt.apt_pkg.config.set('APT::Architecture', self.get_system_architecture())

        if verbose:
            fetchProgress = apt.progress.text.AcquireProgress()
        else:
            fetchProgress = apt.progress.base.AcquireProgress()
        if not tmp_aptroot:
            c = self._sandbox_cache(aptroot, apt_sources, fetchProgress)
        else:
            self._build_apt_sandbox(aptroot, apt_sources)
            c = apt.Cache(rootdir=os.path.abspath(aptroot))
            try:
                c.update(fetchProgress)
            except apt.cache.FetchFailedException as e:
                raise SystemError(str(e))
            c.open()

        obsolete = ''

        src_records = apt.apt_pkg.SourceRecords()

        REPO = 'http://archive.zentyal.org/old'
        UBUNTU_REPO = 'http://archive.ubuntu.com/ubuntu'
        DBG_PKGS = {'libldb1': 'ldb',
                    'libsope1': 'sope',
                    'libc6': 'eglibc',
                    'libwbclient0': 'samba',
                    'libwbxml2-0': 'wbxml2',
                    'openchangeserver': 'openchange',
                    'openchangeproxy': 'openchange',
                    'libmapi0': 'openchange',
                    'libmapistore0': 'openchange',
                    'libmapiproxy0': 'openchange',
                    'libnss-winbind': 'samba',
                    'libpam-winbind': 'samba',
                    'python-ldb': 'ldb',
                    'python-talloc': 'talloc',
                    'samba': 'samba',
                    'samba-common-bin': 'samba',
                    'samba-dsdb-modules': 'samba',
                    'samba-libs': 'samba',
                    'samba-testsuite': 'samba',
                    'samba-vfs-modules': 'samba',
                    'sogo': 'sogo',
                    'sogo-openchange': 'sogo',
                    'libtalloc2': 'talloc',
                    'libtevent0': 'tevent',
                    'libntdb1': 'ntdb',
                    'winbind': 'samba'}

        # urls to overwrite dbg packages with proper version
        dbg_pkg_urls = {}
        pkgs_not_found = []

        # mark packages for installation
        real_pkgs = set()
        for (pkg, ver) in packages:
            if not ver:
                continue
            if pkg in DBG_PKGS.keys():
                source = DBG_PKGS[pkg]
                for pname in [pkg, pkg + '-dbg']:
                    for repo in (REPO, UBUNTU_REPO):
                        filename = pname + '_' + re.sub('^.*:', '', ver) + '_amd64.deb'
                        url = repo + '/pool/main/' + source[0:1] + '/' + source + '/' + filename
                        if requests.head(url).status_code == 200:
                            dbg_pkg_urls[pname] = url
                            break
                        elif repo == UBUNTU_REPO and pname[-4:] != '-dbg':
                            pkgs_not_found.append(filename)

            try:
                candidate = c[pkg].candidate
            except KeyError:
                candidate = None
            if not candidate:
                m = 'package %s does not exist, ignoring' % pkg.replace('%', '%%')
                obsolete += m + '\n'
                apport.warning(m)
                continue

            if ver and candidate.version != ver:
                w = '%s version %s required, but %s is available' % (pkg, ver, candidate.version)
                obsolete += w + '\n'
            real_pkgs.add(pkg)

            if permanent_rootdir:
                virtual_mapping = self._virtual_mapping(aptroot)
                # Remember all the virtual packages that this package provides,
                # so that if we encounter that virtual package as a
                # Conflicts/Replaces later, we know to remove this package from
                # the cache.
                for p in candidate.provides:
                    virtual_mapping.setdefault(p, set()).add(pkg)
                conflicts = []
                if 'Conflicts' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(candidate.record['Conflicts'])
                if 'Replaces' in candidate.record:
                    conflicts += apt.apt_pkg.parse_depends(candidate.record['Replaces'])
                archives = apt.apt_pkg.config.find_dir('Dir::Cache::archives')
                for conflict in conflicts:
                    # apt_pkg.parse_depends needs to handle the or operator,
                    # but as policy states it is invalid to use that in
                    # Replaces/Depends, we can safely choose the first value
                    # here.
                    conflict = conflict[0]
                    if c.is_virtual_package(conflict[0]):
                        try:
                            providers = virtual_mapping[conflict[0]]
                        except KeyError:
                            # We may not have seen the virtual package that
                            # this conflicts with, so we can assume it's not
                            # unpacked into the sandbox.
                            continue
                        for p in providers:
                            debs = os.path.join(archives, '%s_*.deb' % p)
                            for path in glob.glob(debs):
                                ver = self._deb_version(path)
                                if apt.apt_pkg.check_dep(ver, conflict[2], conflict[1]):
                                    os.unlink(path)
                        del providers
                    else:
                        debs = os.path.join(archives, '%s_*.deb' % conflict[0])
                        for path in glob.glob(debs):
                            ver = self._deb_version(path)
                            if apt.apt_pkg.check_dep(ver, conflict[2], conflict[1]):
                                os.unlink(path)

            if candidate.architecture != 'all':
                if pkg + '-dbg' in c:
                    real_pkgs.add(pkg + '-dbg')
                else:
                    # install all -dbg from the source package
                    if src_records.lookup(candidate.source_name):
                        dbgs = [p for p in src_records.binaries if p.endswith('-dbg') and p in c]
                    else:
                        dbgs = []
                    if dbgs:
                        for p in dbgs:
                            real_pkgs.add(p)
                    else:
                        if pkg + '-dbgsym' in c:
                            real_pkgs.add(pkg + '-dbgsym')
                            if c[pkg + '-dbgsym'].candidate.version != candidate.version:
                                obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
                                    pkg, candidate.version, c[pkg + '-dbgsym'].candidate.version)

        if pkgs_not_found:
            print('Aborting retrace as some packages cannot be found '
                  'in the repos:')
            print('\n'.join(pkgs_not_found))
            sys.exit(1)

        for p in real_pkgs:
            c[p].mark_install(False, False)

        last_written = time.time()
        # fetch packages
        fetcher = apt.apt_pkg.Acquire(fetchProgress)
        try:
            c.fetch_archives(fetcher=fetcher)
        except apt.cache.FetchFailedException as e:
            apport.error('Package download error, try again later: %s', str(e))
            sys.exit(99)  # transient error

        # FIXME: unhardcode path
        pkgs_path = cache_dir + '/Ubuntu 14.04/apt/var/cache/apt/archives'
        debs_to_unpack = []
        for pname, url in dbg_pkg_urls.items():
            # Remove other versions before downloading the proper one
            for i in glob.glob(os.path.join(pkgs_path, '%s_*.deb' % pname)):
                #print "DELETING AUTO-FETCHED DEB: " + i
                os.unlink(i)
            #print "DOWNLOADING URL: " + url + "\n";
            urlretrieve(url, pkgs_path + '/' + os.path.basename(url))
            debs_to_unpack.append(pkgs_path + '/' + os.path.basename(url))

        # unpack packages
        if verbose:
            print('Extracting downloaded debs...')
        for i in fetcher.items:
            if not permanent_rootdir or os.path.getctime(i.destfile) > last_written:
                if os.path.isfile(i.destfile):
                    subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])
            real_pkgs.remove(os.path.basename(i.destfile).split('_', 1)[0])

        for debfile in debs_to_unpack:
            #print "UNPACKING NEW DOWNLOADED DEB: " + debfile + " IN: " + rootdir
            subprocess.check_call(['dpkg', '-x', debfile, rootdir])

        if tmp_aptroot:
            shutil.rmtree(aptroot)

        # check bookkeeping that apt fetcher really got everything
        assert not real_pkgs, 'apt fetcher did not fetch these packages: ' \
            + ' '.join(real_pkgs)

        if permanent_rootdir:
            self._save_virtual_mapping(aptroot)

        return obsolete
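
The conflict pruning above hinges on apt.apt_pkg.check_dep(), which tests whether a version string satisfies a versioned relation. A minimal sketch of its behavior, runnable wherever python-apt is installed (the version strings are made-up illustration values):

import apt

apt.apt_pkg.init()  # initialize apt configuration before using version comparisons

# check_dep(pkg_version, operator, dep_version) -> bool:
# does pkg_version satisfy "<operator> dep_version"?
print(apt.apt_pkg.check_dep('1.0-1', '<=', '2.0'))  # True: 1.0-1 <= 2.0
print(apt.apt_pkg.check_dep('3.0-1', '<=', '2.0'))  # False: 3.0-1 > 2.0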
Example #14
def make_sandbox(report, config_dir, cache_dir=None, sandbox_dir=None,
                 extra_packages=[], verbose=False, log_timestamps=False):
    '''Build a sandbox with the packages that belong to a particular report.

    This downloads and unpacks all packages from the report's Package and
    Dependencies fields, plus all packages that ship the files from ProcMaps
    (often, runtime plugins do not appear in Dependencies), plus optionally
    some extra ones, for the distro release and architecture of the report.

    For unpackaged executables, there are no Dependencies. Packages for shared
    libraries are unpacked.

    report is an apport.Report object to build a sandbox for. Presence of the
    Package field determines whether to determine dependencies through
    packaging (via the optional report['Dependencies'] field), or through ldd
    via needed_runtime_packages() -> shared_libraries().  Usually
    report['Architecture'] and report['Uname'] are present.

    config_dir points to a directory with by-release configuration files for
    the packaging system, or "system"; this is passed to
    apport.packaging.install_packages(), see that method for details.

    cache_dir points to a directory where the downloaded packages and debug
    symbols are kept, which is useful if you create sandboxes very often. If
    not given, the downloaded packages get deleted at program exit.

    sandbox_dir points to a directory with a permanently unpacked sandbox with
    the already unpacked packages. This speeds up operations even further if
    you need to create sandboxes for different reports very often; but the
    sandboxes can become very big over time, and you must ensure that an
    already existing sandbox matches the DistroRelease: and Architecture: of
    report. If not given, a temporary directory will be created which gets
    deleted at program exit.

    extra_packages can specify a list of additional packages to install which
    are not derived from the report.

    If verbose is True (False by default), this will write some additional
    logging to stdout. If log_timestamps is True, these log messages will be
    prefixed with the current time.

    Return a tuple (sandbox_dir, cache_dir, outdated_msg).
    '''
    # sandbox
    if sandbox_dir:
        sandbox_dir = os.path.abspath(sandbox_dir)
        if not os.path.isdir(sandbox_dir):
            os.makedirs(sandbox_dir)
        permanent_rootdir = True
    else:
        sandbox_dir = tempfile.mkdtemp(prefix='apport_sandbox_')
        atexit.register(shutil.rmtree, sandbox_dir)
        permanent_rootdir = False

    # cache
    if cache_dir:
        cache_dir = os.path.abspath(cache_dir)
    else:
        cache_dir = tempfile.mkdtemp(prefix='apport_cache_')
        atexit.register(shutil.rmtree, cache_dir)

    pkgs = []

    # when ProcMaps is available and we don't have any third-party packages, it
    # is enough to get the libraries in it and map their files to packages;
    # otherwise, get Package/Dependencies
    if 'ProcMaps' not in report or '[origin' in (report.get('Package', '') + report.get('Dependencies', '')):
        pkgs = needed_packages(report)

    # add user-specified extra packages, if any
    for p in extra_packages:
        pkgs.append((p, None))
    if config_dir == 'system':
        config_dir = None

    # unpack packages, if any, using cache and sandbox
    try:
        outdated_msg = apport.packaging.install_packages(
            sandbox_dir, config_dir, report['DistroRelease'], pkgs,
            verbose, cache_dir, permanent_rootdir,
            architecture=report.get('Architecture'))
    except SystemError as e:
        sys.stderr.write(str(e) + '\n')
        sys.exit(1)

    pkgs = needed_runtime_packages(report, sandbox_dir, cache_dir, verbose)

    # package hooks might reassign Package:, check that we have the originally
    # crashing binary
    for path in ('InterpreterPath', 'ExecutablePath'):
        if path in report and not os.path.exists(sandbox_dir + report[path]):
            pkg = apport.packaging.get_file_package(report[path], True, cache_dir,
                                                    release=report['DistroRelease'],
                                                    arch=report.get('Architecture'))
            if pkg:
                apport.log('Installing extra package %s to get %s' % (pkg, path), log_timestamps)
                pkgs.append((pkg, None))
            else:
                apport.warning('Cannot find package which ships %s', path)

    # unpack packages for executable using cache and sandbox
    if pkgs:
        try:
            outdated_msg += apport.packaging.install_packages(
                sandbox_dir, config_dir, report['DistroRelease'], pkgs,
                cache_dir=cache_dir, architecture=report.get('Architecture'))
        except SystemError as e:
            sys.stderr.write(str(e) + '\n')
            sys.exit(1)

    # sanity check: for a packaged binary we require having the executable in
    # the sandbox; TODO: for an unpackaged binary we don't currently copy its
    # potential local library dependencies (like those in build trees) into the
    # sandbox, and we call gdb/valgrind on the binary outside the sandbox.
    if 'Package' in report:
        for path in ('InterpreterPath', 'ExecutablePath'):
            if path in report and not os.path.exists(sandbox_dir + report[path]):
                apport.error('%s %s does not exist (report specified package %s)',
                             path, sandbox_dir + report[path], report['Package'])
                sys.exit(0)

    if outdated_msg:
        report['RetraceOutdatedPackages'] = outdated_msg

    apport.memdbg('built sandbox')

    return sandbox_dir, cache_dir, outdated_msg
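
A usage sketch for make_sandbox(); the crash file path and cache directory are placeholders, and the import location (apport.sandboxutils) is assumed and may differ between apport versions:

import apport
from apport.sandboxutils import make_sandbox  # module path assumed

report = apport.Report()
with open('/tmp/example.crash', 'rb') as f:  # hypothetical crash report
    report.load(f)

# 'system' uses the current system's packaging configuration
sandbox, cache, outdated = make_sandbox(
    report, 'system', cache_dir='/var/cache/apport-retrace', verbose=True)
if outdated:
    print('Outdated packages:\n' + outdated)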
Example #15
        '''Check that we have one crash report, and verify its contents'''

        reports = apport.fileutils.get_new_reports()
        self.assertEqual(len(reports), 1, 'did not create a crash report')
        r = apport.Report()
        with open(reports[0], 'rb') as f:
            r.load(f)
        self.assertEqual(r['ProblemType'], 'Crash')
        self.assertTrue(r['ProcCmdline'].startswith('java -classpath'), r)
        self.assertTrue(r['StackTrace'].startswith(
            "java.lang.RuntimeException: Can't catch this"))
        if '.jar!' in main_file:
            self.assertEqual(r['MainClassUrl'], 'jar:file:' + main_file)
        else:
            self.assertEqual(r['MainClassUrl'], 'file:' + main_file)
        self.assertTrue('DistroRelease' in r)
        self.assertTrue('ProcCwd' in r)

#
# main
#

try:
    subprocess.check_call(['java', '-version'], stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
except OSError:
    apport.warning('Java not available, skipping')
    sys.exit(0)

unittest.main()
Example #16
    def install_packages(self, rootdir, configdir, release, packages,
            verbose=False, cache_dir=None):
        '''Install packages into a sandbox (for apport-retrace).

        In order to work without any special permissions and without touching
        the running system, this should only download and unpack packages into
        the given root directory, not install them into the system.

        configdir points to a directory with by-release configuration files for
        the packaging system; this is completely dependent on the backend
        implementation, the only assumption is that this looks into
        configdir/release/, so that you can use retracing for multiple
        DistroReleases. As a special case, if configdir is None, it uses the
        current system configuration, and "release" is ignored.

        release is the value of the report's 'DistroRelease' field.

        packages is a list of ('packagename', 'version') tuples. If the version
        is None, it should install the most current available version.

        If cache_dir is given, then the downloaded packages will be stored
        there, to speed up subsequent retraces.

        Return a string with outdated packages, or an empty string if all
        packages were installed.
        '''
        if not configdir:
            apt_sources = '/etc/apt/sources.list'
        else:
            apt_sources = os.path.join(configdir, release, 'sources.list')
        if not os.path.exists(apt_sources):
            raise SystemError('%s does not exist' % apt_sources)

        # create apt sandbox
        if cache_dir:
            tmp_aptroot = False
            if configdir:
                aptroot = os.path.join(cache_dir, release, 'apt')
            else:
                aptroot = os.path.join(cache_dir, 'system', 'apt')
            try:
                os.makedirs(aptroot)
            except OSError:
                pass
        else:
            tmp_aptroot = True
            aptroot = tempfile.mkdtemp()

        self._build_apt_sandbox(aptroot, apt_sources)

        if verbose:
            fetchProgress = apt.progress.text.AcquireProgress()
        else:
            fetchProgress = apt.progress.base.AcquireProgress()
        # point the cache at the sandbox root, not at the running system
        c = apt.Cache(rootdir=os.path.abspath(aptroot))
        c.update(fetchProgress)
        # re-open the cache to read the updated package lists
        c.open()

        obsolete = ''

        # mark packages for installation
        for (pkg, ver) in packages:
            try:
                candidate = c[pkg].candidate
            except KeyError:
                candidate = None
            if not candidate:
                m = 'package %s does not exist, ignoring' % pkg
                obsolete += m + '\n'
                apport.warning(m)
                continue

            if ver and candidate.version != ver:
                w = '%s version %s required, but %s is available' % (pkg, ver, candidate.version)
                obsolete += w + '\n'
            c[pkg].mark_install(False, False)

            if candidate.architecture != 'all':
                if pkg + '-dbg' in c:
                    c[pkg + '-dbg'].mark_install(False, False)
                elif pkg + '-dbgsym' in c:
                    c[pkg + '-dbgsym'].mark_install(False, False)
                    if c[pkg + '-dbgsym'].candidate.version != candidate.version:
                        obsolete += 'outdated debug symbol package for %s: package version %s dbgsym version %s\n' % (
                            pkg, candidate.version, c[pkg + '-dbgsym'].candidate.version)

        # fetch packages
        fetcher = apt.apt_pkg.Acquire(fetchProgress)
        try:
            c.fetch_archives(fetcher=fetcher)
        except apt.cache.FetchFailedException as e:
            apport.error('Package download error, try again later: %s', str(e))
            sys.exit(99)  # transient error

        # unpack packages
        if verbose:
            print('Extracting downloaded debs...')
        for i in fetcher.items:
            subprocess.check_call(['dpkg', '-x', i.destfile, rootdir])

        if tmp_aptroot:
            shutil.rmtree(aptroot)

        # reset config
        apt.apt_pkg.init_config()

        return obsolete
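
A sketch of calling this backend method the way apport-retrace does; every path and package version below is illustrative only, and configdir must contain a release/sources.list as described in the docstring:

import apport.packaging

outdated = apport.packaging.install_packages(
    '/tmp/retrace-root',            # rootdir: sandbox to unpack into
    '/srv/retrace-config',          # configdir (illustrative path)
    'Ubuntu 14.04',                 # release, matching configdir layout
    [('coreutils', None),           # latest available version
     ('libc6', '2.19-0ubuntu6')],   # specific (possibly outdated) version
    verbose=True,
    cache_dir='/var/cache/apport-retrace')
if outdated:
    print('Outdated packages:\n' + outdated)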