Example #1
    def remove(*package_names) -> None:

        upgrade()

        cache = Cache()
        cache.update()
        cache.open()

        for name in package_names:

            if name not in cache:
                print('Package %s not found!' % (name, ))
                continue

            package = cache[name]

            if not package.is_installed:
                print('Package %s is not installed!' % (name, ))
                continue

            package.mark_delete(purge=True)

        cache.commit(TextFetchProgress(), InstallProgress())

        cache.close()
Example #2
    def install(*package_names) -> None:

        upgrade()

        cache = Cache()
        cache.update()
        cache.open()

        for name in package_names:

            if name not in cache:
                logger.error('Package %s not found!' % (name, ))
                continue

            package = cache[name]

            if package.is_installed:
                logger.warning('Package %s already installed!' % (name, ))
                continue

            package.mark_install()

        cache.commit(TextFetchProgress(), InstallProgress())

        cache.close()
Example #3
    def upgrade() -> None:

        cache = Cache()
        cache.update()
        cache.open()
        cache.upgrade(dist_upgrade=True)
        cache.fix_broken()
        cache.commit(TextFetchProgress(), InstallProgress())
        cache.close()
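
The three helpers above are shown without their imports. A minimal sketch of how they might be wired up, assuming they are module-level functions and assuming the older python-apt names the snippets reference (apt.Cache, apt.progress.TextFetchProgress, apt.progress.InstallProgress); current python-apt releases provide apt.progress.text.AcquireProgress and apt.progress.base.InstallProgress instead:

    # Assumed imports for Examples #1-#3 (older python-apt API).
    from apt import Cache
    from apt.progress import TextFetchProgress, InstallProgress

    if __name__ == "__main__":
        install("htop", "tmux")   # each helper runs upgrade() first
        remove("nano")            # then purges the listed packages
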
Example #4
def do_update(mark_only):
    _, progress = query_verbosity()

    log.info("Getting list of eligible packages...")
    cache = Cache(progress)
    f_cache = FilteredCache(cache)
    f_cache.set_filter(NvidiaFilter())
    names = f_cache.keys()

    with unhold(names, cache):
        # mark_only means we just want the side-effects of exiting the
        # unhold() context manager.
        if mark_only:
            return False

        log.info("Updating package list...")
        try:
            cache.update()
        except FetchFailedException as err:
            log.warn(err)
        cache.open(progress)  # Refresh package list

        old_versions = {name: cache[name].installed for name in names}
        log.info("Updating all packages...")
        for name in names:
            if cache[name].is_upgradable:
                cache[name].mark_upgrade()
        cache.commit(None, None)

        log.info("Refreshing package cache...")
        cache.open(progress)
        new_versions = {name: cache[name].installed for name in names}

        log.info("Checking whether packages were upgraded...")
        for name in old_versions:
            if old_versions[name] != new_versions[name]:
                log.info("Kernel module changed")
                return True
        return False
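
NvidiaFilter, unhold and query_verbosity are project-specific helpers that are not shown here. For illustration only, a filter of the kind set on the FilteredCache above could look like this (the name-matching rule is an assumption):

    from apt.cache import Cache, Filter, FilteredCache

    class NvidiaFilter(Filter):
        """Keep installed packages whose name mentions nvidia (assumed rule)."""
        def apply(self, pkg):
            return pkg.is_installed and "nvidia" in pkg.name

    cache = Cache()
    f_cache = FilteredCache(cache)
    f_cache.set_filter(NvidiaFilter())
    print(sorted(f_cache.keys()))
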
Example #5
    def do_install(self, to_install, langpacks=False):
        self.nested_progress_start()

        if langpacks:
            self.db.progress('START', 0, 10, 'ubiquity/langpacks/title')
        else:
            self.db.progress('START', 0, 10, 'ubiquity/install/title')
        self.db.progress('INFO', 'ubiquity/install/find_installables')

        self.progress_region(0, 1)
        fetchprogress = DebconfAcquireProgress(
            self.db, 'ubiquity/install/title',
            'ubiquity/install/apt_indices_starting',
            'ubiquity/install/apt_indices')
        cache = Cache()

        if cache._depcache.broken_count > 0:
            syslog.syslog(
                'not installing additional packages, since there are broken '
                'packages: %s' % ', '.join(broken_packages(cache)))
            self.db.progress('STOP')
            self.nested_progress_end()
            return

        for pkg in to_install:
            mark_install(cache, pkg)

        self.db.progress('SET', 1)
        self.progress_region(1, 10)
        if langpacks:
            fetchprogress = DebconfAcquireProgress(
                self.db, 'ubiquity/langpacks/title', None,
                'ubiquity/langpacks/packages')
            installprogress = DebconfInstallProgress(
                self.db, 'ubiquity/langpacks/title',
                'ubiquity/install/apt_info')
        else:
            fetchprogress = DebconfAcquireProgress(
                self.db, 'ubiquity/install/title', None,
                'ubiquity/install/fetch_remove')
            installprogress = DebconfInstallProgress(
                self.db, 'ubiquity/install/title',
                'ubiquity/install/apt_info',
                'ubiquity/install/apt_error_install')
        chroot_setup(self.target)
        commit_error = None
        try:
            try:
                if not self.commit_with_verify(cache,
                                               fetchprogress, installprogress):
                    fetchprogress.stop()
                    installprogress.finishUpdate()
                    self.db.progress('STOP')
                    self.nested_progress_end()
                    return
            except IOError:
                for line in traceback.format_exc().split('\n'):
                    syslog.syslog(syslog.LOG_ERR, line)
                fetchprogress.stop()
                installprogress.finishUpdate()
                self.db.progress('STOP')
                self.nested_progress_end()
                return
            except SystemError as e:
                for line in traceback.format_exc().split('\n'):
                    syslog.syslog(syslog.LOG_ERR, line)
                commit_error = str(e)
        finally:
            chroot_cleanup(self.target)
        self.db.progress('SET', 10)

        cache.open(None)
        if commit_error or cache._depcache.broken_count > 0:
            if commit_error is None:
                commit_error = ''
            brokenpkgs = broken_packages(cache)
            self.warn_broken_packages(brokenpkgs, commit_error)

        self.db.progress('STOP')

        self.nested_progress_end()
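
broken_packages() and mark_install() are helpers from the installer code around this excerpt, not python-apt API. A rough, purely illustrative stand-in for the former, assuming it only needs to report the names of currently broken packages:

    def broken_packages(cache):
        # Hypothetical stand-in for the helper used above: collect the names of
        # packages the dependency resolver currently considers broken.
        return sorted(pkg.name for pkg in cache
                      if cache._depcache.is_inst_broken(pkg._pkg))
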
Example #6
class UpdateMaker():
    """A class for making updates for OreSat Linux Updater daemon"""

    def __init__(self, board: str):
        """
        Parameters
        ----------
        board: str
            The board to make the update for.
        """
        self._board = board
        self._status_file = ""
        self._cache = Cache(rootdir=ROOT_DIR)
        self._cache.update(raise_on_error=False)
        self._cache.open()
        self._deb_pkgs = []
        self._inst_list = []

        # make sure all dirs exist
        Path(OLU_DIR).mkdir(parents=True, exist_ok=True)
        Path(ROOT_DIR).mkdir(parents=True, exist_ok=True)
        Path(DOWNLOAD_DIR).mkdir(parents=True, exist_ok=True)
        Path(UPDATE_CACHE_DIR).mkdir(parents=True, exist_ok=True)
        Path(STATUS_CACHE_DIR).mkdir(parents=True, exist_ok=True)

        # clear download dir
        for i in listdir(DOWNLOAD_DIR):
            if i.endswith(".deb"):
                remove(DOWNLOAD_DIR + i)

        status_files = []
        for i in listdir(STATUS_CACHE_DIR):
            status_files.append(OLMFile(load=i))
        status_files.sort()

        # find latest olu status tar file
        for i in status_files:
            if i.name == board:
                self._status_file = STATUS_CACHE_DIR + i.name
                break

        if self._status_file == "":
            msg = "No status file for {} board in cache".format(board)
            raise FileNotFoundError(msg)

        # update status file
        dpkg_data = read_dpkg_status_file(self._status_file)
        with open(DPKG_STATUS_FILE, "w") as fptr:
            fptr.write(dpkg_data)

        # TODO deal with update files that are not installed yet.

    def add_packages(self, packages: list):
        """Add deb packages to be installed.

        Parameters
        ----------
        packages: list
            A list of deb packages to install on the board.
        """

        if packages == []:
            raise ValueError("Requires a list of packages to install")

        inst_deb_pkgs = []

        for pkg in packages:
            pkg_obj = self._cache[pkg]
            pkg_obj.mark_install()  # this will mark all dependencies too

            # find new packages (dependencies) that are marked
            for deb_pkg in self._cache:
                if deb_pkg.marked_install and \
                        deb_pkg.name not in self._deb_pkgs:
                    self._deb_pkgs.append(deb_pkg.name)
                    inst_deb_pkgs.append(deb_pkg.name)

        new_inst = Instruction(InstructionType.DPKG_INSTALL, inst_deb_pkgs)
        self._inst_list.append(new_inst)

    def remove_packages(self, packages: list):
        """Remove deb packages on board.

        Parameters
        ----------
        packages: list
            A list of deb packages to remove on the board.
        """

        if packages == []:
            raise ValueError("Requires a list of packages to remove")

        new_inst = Instruction(InstructionType.DPKG_REMOVE, packages)
        self._inst_list.append(new_inst)

    def purge_packages(self, packages: list):
        """Purge deb packages on board.

        Parameters
        ----------
        packages: list
            A list of deb packages to remove on the board.
        """

        if packages == []:
            raise ValueError("Requires a list of packages to remove")

        new_inst = Instruction(InstructionType.DPKG_PURGE, packages)
        self._inst_list.append(new_inst)

    def add_bash_scripts(self, bash_scripts: list):
        """Run bash scripts on the board.

        Parameters
        ----------
        bash_scripts: list
            A list of bash scripts to run on the board.
        """

        if bash_scripts == []:
            raise ValueError("Requires a list of bash scripts to run")

        new_inst = Instruction(InstructionType.BASH_SCRIPT, bash_scripts)
        self._inst_list.append(new_inst)

    def add_support_files(self, support_files: list):
        """Add a support files to update archive.

        Parameters
        ----------
        support_files: list
            A list of support files to add to the update.
        """

        for s_file in support_files:
            if not isfile(s_file):
                raise ValueError("{} was not found".format(s_file))

        new_inst = Instruction(InstructionType.SUPPORT_FILE, support_files)
        self._inst_list.append(new_inst)

    def status(self):
        """Print the contexts of instructions list"""

        for i in self._inst_list:
            print(i)

    def make_update_archive(self):
        """Make the update archive"""

        # download deb files
        self._cache.fetch_archives()

        # replace package name with deb filepath in instruction obj
        for inst in self._inst_list:
            if not inst.type == InstructionType.DPKG_INSTALL:
                continue

            for i in range(len(inst.items)):
                found = False
                for deb_file in listdir(DOWNLOAD_DIR):
                    if not deb_file.endswith(".deb"):
                        continue

                    if deb_file.startswith(inst.items[i]+"_"):
                        inst.items[i] = DOWNLOAD_DIR + deb_file
                        found = True
                        break

                if not found:
                    print("No .deb file found for {}".format(inst.items[i]))

        print("Making tar")

        update_file = create_update_file(self._board, self._inst_list, "./")

        print("{} was made".format(update_file))
Example #7
class _AptChangelog():

    def __init__(self, interactive:bool=False):
        self.interactive = interactive

        # constants
        # apt uses MB rather than MiB, so let's stay consistent
        self.MB = 1000 ** 2
        # downloads larger than this require confirmation or fail
        self.max_download_size_default = 1.5 * self.MB
        self.max_download_size = self.max_download_size_default
        max_download_size_msg_template = "\
To retrieve the full changelog, %s MB have to be downloaded.\n%s\
\n\
Proceed with the download?"
        self.max_download_size_msg_lc = max_download_size_msg_template % ("%.1f",
            "Otherwise we will try to retrieve just the last change.\n")
        self.max_download_size_msg = max_download_size_msg_template % ("%.1f","")
        self.max_download_size_msg_unknown = max_download_size_msg_template % ("an unknown amount of", "")

        self.apt_cache = None
        self.apt_cache_date = None
        self.candidate = None

        # get apt's configuration
        apt_pkg.init_config()
        if apt_pkg.config.exists("Acquire::Changelogs::URI::Origin"):
            self.apt_origins = apt_pkg.config.subtree("Acquire::Changelogs::URI::Origin")
        else:
            self.apt_origins = None
        if apt_pkg.config.exists("Dir::Cache::pkgcache"):
            self.apt_cache_path = apt_pkg.config.find_dir("Dir::Cache")
            self.pkgcache = apt_pkg.config.find_file("Dir::Cache::pkgcache")
        else:
            self.apt_cache = "invalid"
        if (self.apt_cache or
            not os.path.isdir(self.apt_cache_path) or
            not os.path.isfile(self.pkgcache)
            ):
            print("E: Invalid APT configuration found, try to run `apt update` first",
                file=sys.stderr)
            self.close(99)

    def get_cache_date(self):
        if os.path.isfile(self.pkgcache):
            return os.path.getmtime(self.pkgcache)
        return None

    def refresh_cache(self):
        cache_date = self.get_cache_date()

        if not self.apt_cache:
            self.apt_cache = Cache()
            self.apt_cache_date = cache_date
        elif cache_date != self.apt_cache_date:
            self.apt_cache.open(None)
            self.apt_cache_date = cache_date

    def drop_cache(self):
        if self.candidate:
            self.candidate = None
        self.apt_cache = None

    def get_changelog(self, pkg_name:str, no_local:bool=False):
        self.refresh_cache()
        self.candidate = self.parse_package_metadata(pkg_name)

        # parse the package's origin
        if not self.candidate.downloadable:
            origin = "local_package"
        elif self.candidate.origin == "linuxmint":
            origin = "linuxmint"
        elif self.candidate.origin.startswith("LP-PPA-"):
            origin = "LP-PPA"
        elif self.apt_origins and self.candidate.origin in self.apt_origins.list():
            origin = "APT"
        else:
            origin = "unsupported"

        # Check for changelog of installed package first
        has_local_changelog = False
        uri = None
        if not no_local and self.candidate.is_installed:
            if _DEBUG: print("Package is installed...")
            uri = self.get_changelog_from_filelist(
                self.candidate.installed_files, local=True)
            # Ubuntu kernel workarounds
            if self.candidate.origin == "Ubuntu":
                if self.candidate.source_name == "linux-signed":
                    uri = uri.replace("linux-image","linux-modules")
                if self.candidate.source_name == "linux-meta":
                    uri = None
            if uri and not os.path.isfile(uri):
                uri = None

        # Do nothing if local changelog exists
        if uri:
            has_local_changelog = True
        # all origins that APT supports
        elif origin == 'APT':
            uri = self.get_apt_changelog_uri(
                self.apt_origins.get(self.candidate.origin))
            r = self.check_url(uri)
            if not r:
                self.exit_on_fail(2)
        # Linux Mint repo
        elif origin == 'linuxmint':
            # Mint repos don't have .debian.tar.xz files, only full packages, so
            # check the package cache first
            base_uri, _ = os.path.split(self.candidate.uri)
            r, uri = self.get_changelog_uri(base_uri)
            if not r:
                # fall back to last change info for the source package
                # Mint's naming scheme seems to be using amd64 unless source
                # is i386 only, we always check amd64 first
                base_uri = "http://packages.linuxmint.com/dev/%s_%s_%s.changes"
                uri = base_uri % (self.candidate.source_name,
                    self.candidate.source_version, "amd64")
                r = self.check_url(uri, False)
                if not r:
                    uri = base_uri % (self.candidate.source_name,
                        self.candidate.source_version, "i386")
                    r = self.check_url(uri, False)
                    if not r:
                        self.exit_on_fail(3)

        # Launchpad PPA
        elif origin == 'LP-PPA':
            ppa_owner, ppa_name, _ = \
                self.candidate.uri.split("ppa.launchpad.net/")[1].split("/", 2)
            base_uri = "http://ppa.launchpad.net/%s/%s/ubuntu/pool/main/{self.source_prefix()}/%s" % (ppa_owner, ppa_name, self.candidate.source_name)
            r, uri = self.get_changelog_uri(base_uri)
            if not r:
                # fall back to last change info only
                uri = "https://launchpad.net/~%s/+archive/ubuntu/%s/+files/%s_%s_source.changes" % (ppa_owner, ppa_name, self.candidate.source_name, self.candidate.source_version)
                r = self.check_url(uri, False)
                if not r:
                    self.exit_on_fail(4)
        # Not supported origin
        elif origin == 'unsupported':
            if _DEBUG: print("Unsupported Package")
            base_uri, _ = os.path.split(self.candidate.uri)
            r, uri = self.get_changelog_uri(base_uri)
            if not r:
                self.exit_on_fail(5)
        # Locally installed package without local changelog or remote
        # source, hope it's cached and contains a changelog
        elif origin == 'local_package':
            uri = self.apt_cache_path + self.candidate.filename
            if not os.path.isfile(uri):
                self.exit_on_fail(6)

        # Changelog downloading, extracting and processing:
        changelog = ""
        # local changelog
        if has_local_changelog and not no_local:
            if _DEBUG: print("Using local changelog:",uri)
            try:
                filename = os.path.basename(uri)
                # determine file type by name/extension
                # as per debian policy 4.4 the encoding must be UTF-8
                # as per policy 12.7 the name must be changelog.Debian.gz or
                # changelog.gz (deprecated)
                if filename.lower().endswith('.gz'):
                    changelog = gzip.open(uri,'r').read().decode('utf-8')
                elif filename.lower().endswith('.xz'):
                    # just in case / future proofing
                    changelog = lzma.open(uri,'r').read().decode('utf-8')
                elif filename.lower() == 'changelog':
                    changelog = open(uri, 'r').read().encode().decode('utf-8')
                else:
                    raise ValueError('Unknown changelog format')
            except Exception as e:
                _generic_exception_handler(e)
                self.exit_on_fail(1)
        # APT-format changelog, download directly
        # - unfortunately this is slow since the servers support no compression
        elif origin == "APT":
            if _DEBUG: print("Downloading: %s (%.2f MB)" % (uri, r.length / self.MB))
            changelog = r.text
            r.close()
        # last change changelog, download directly
        elif uri.endswith('.changes'):
            if _DEBUG: print("Downloading: %s (%.2f MB)" % (uri, r.length / self.MB))
            changes = r.text.split("Changes:")[1].split("Checksums")[0].split("\n")
            r.close()
            for change in changes:
                change = change.strip()
                if change:
                    if change == ".":
                        change = ""
                    changelog += change + "\n"
        # compressed binary source, download and extract changelog
        else:
            source_is_cache = uri.startswith(self.apt_cache_path)
            if _DEBUG: print("Using cached package:" if source_is_cache else
                "Downloading: %s (%.2f MB)" % (uri, r.length / self.MB))
            try:
                if not source_is_cache:
                    # download stream to temporary file
                    tmpFile = tempfile.NamedTemporaryFile(prefix="apt-changelog-")
                    if self.interactive and r.length:
                        # download chunks with progress indicator
                        recv_length = 0
                        blocks = 60
                        for data in r.iter_content(chunk_size=16384):
                            recv_length += len(data)
                            tmpFile.write(data)
                            recv_pct = recv_length / r.length
                            recv_blocks = int(blocks * recv_pct)
                            print("\r[%(progress)s%(spacer)s] %(percentage).1f%%" %
                                {
                                    "progress": "=" * recv_blocks,
                                    "spacer":  " " * (blocks - recv_blocks),
                                    "percentage": recv_pct * 100
                                }, end="", flush=True)
                        # clear progress bar when done
                        print("\r" + " " * (blocks + 10), end="\r", flush=True)
                    else:
                        # no content-length or non-interactive, download in one go
                        # up to the configured max_download_size, ask only when
                        # exceeded
                        r.raw.decode_content = True
                        size = 0
                        size_exceeded = False
                        while True:
                            buf = r.raw.read(16*1024)
                            if not size_exceeded:
                                size += len(buf)
                                if size > self.max_download_size:
                                    if not self.user_confirm(self.max_download_size_msg_unknown):
                                        r.close()
                                        tmpFile.close()
                                        return ""
                                    else:
                                        size_exceeded = True
                            if not buf:
                                break
                            tmpFile.write(buf)
                    r.close()
                    tmpFile.seek(0)
                if uri.endswith(".deb"):
                    # process .deb file
                    if source_is_cache:
                        f = uri
                    else:
                        f = tmpFile.name
                        # We could copy the downloaded .deb files to the apt
                        # cache here but then we'd need to run the script elevated:
                        # shutil.copy(f, self.apt_cache_path + os.path.basename(uri))
                    deb = DebPackage(f)
                    changelog_file = self.get_changelog_from_filelist(deb.filelist)
                    if changelog_file:
                        changelog = deb.data_content(changelog_file)
                        if changelog.startswith('Automatically decompressed:'):
                            changelog = changelog[29:]
                    else:
                        raise ValueError('Malformed Debian package')
                elif uri.endswith(".diff.gz"):
                    # Ubuntu partner repo has .diff.gz files,
                    # we can extract a changelog from that
                    data = gzip.open(tmpFile.name, "r").read().decode('utf-8')
                    additions = data.split("+++")
                    for addition in additions:
                        lines = addition.split("\n")
                        if "/debian/changelog" in lines[0]:
                            for line in lines[2:]:
                                if line.startswith("+"):
                                    changelog += "%s\n" % line[1:]
                                else:
                                    break
                    if not changelog:
                        raise ValueError('No changelog in .diff.gz')
                else:
                    # process .tar.xz file
                    with tarfile.open(fileobj=tmpFile, mode="r:xz") as tar:
                        changelog_file = self.get_changelog_from_filelist(
                            [s.name for s in tar.getmembers() if s.type in (b"0", b"2")])
                        if changelog_file:
                            changelog = tar.extractfile(changelog_file).read().decode()
                        else:
                            raise ValueError('No changelog in source package')
            except Exception as e:
                _generic_exception_handler(e)
                self.exit_on_fail(520)
            if 'tmpFile' in vars():
                try:
                    tmpFile.close()
                except Exception as e:
                    _generic_exception_handler(e)

        # ALL DONE
        return changelog

    def parse_package_metadata(self, pkg_name:str):
        """ Creates the self.candidate object based on package name=version/release

        Wildcard matching is only used for version and release, and only the
        first match is processed.
        """
        # parse =version declaration
        if "=" in pkg_name:
            (pkg_name, pkg_version) = pkg_name.split("=", 1)
            pkg_release = None
        # parse /release declaration (only if no version specified)
        elif "/" in pkg_name:
            (pkg_name, pkg_release) = pkg_name.split("/", 1)
            pkg_version = None
        else:
            pkg_version = None
            pkg_release = None

        # check if pkg_name exists
        # unlike apt no pattern matching, a single exact match only
        if pkg_name in self.apt_cache:
            pkg = self.apt_cache[pkg_name]
        else:
            print("E: Unable to locate package %s" % pkg_name, file=sys.stderr)
            self.close(13)

        # get package data
        _candidate = None
        candidate = None
        if pkg_release or pkg_version:
            match_found = False
            for _pkg in pkg.versions:
                if pkg_version:
                    if fnmatch.fnmatch(_pkg.version, pkg_version):
                        match_found = True
                else:
                    for _origin in _pkg.origins:
                        if fnmatch.fnmatch(_origin.archive, pkg_release):
                            match_found = True
                if match_found:
                    _candidate = _pkg
                    break
            if not match_found:
                if pkg_release:
                    print('E: Release "%s" is unavailable for "%s"' % (pkg_release, pkg.name),
                          file=sys.stderr)
                else:
                    print('E: Version "%s" is unavailable for "%s"' % (pkg_version, pkg.name),
                          file=sys.stderr)
                self.close(14)
        else:
            _candidate = pkg.candidate
        candidate = _Package(
            version = _candidate.version,
            name = _candidate.package.name,
            fullname = None,
            architecture = pkg.architecture,
            source_name = _candidate.source_name,
            source_version = _candidate.source_version,
            uri = _candidate.uri,
            filename = os.path.basename(_candidate.filename),
            origin = _candidate.origins[0].origin,
            component = _candidate.origins[0].component,
            downloadable = _candidate.downloadable,
            is_installed = _candidate.is_installed,
            dependencies = _candidate.dependencies
        )
        if candidate.is_installed:
            candidate.installed_files = pkg.installed_files
        candidate.source_version_raw = candidate.source_version
        if ":" in candidate.source_version:
            candidate.source_version = candidate.source_version.split(":", 1)[1]
        return candidate

    def check_url(self, url:str, check_size:bool=True, stream:bool=True,
        msg:str=None):
        """ True if url can be downloaded and fits size requirements """
        if _DEBUG: print("Checking:", url)
        try:
            _r = requests.get(url, stream=stream, timeout=5)
        except Exception as e:
            _generic_exception_handler(e)
        else:
            if _r:
                if not _r.encoding:
                    _r.encoding = "utf-8"
                length = _r.headers.get("Content-Length")
                if length:
                    _r.length = int(length)
                else:
                    _r.length = 0
                if (not check_size or not
                    (check_size and _r.length > self.max_download_size and not
                    self.user_confirm(
                        (self.max_download_size_msg_lc if not msg else msg) %
                        (_r.length / self.MB))
                    )):
                    return _r
        if '_r' in vars():
            _r.close()
        return False

    @staticmethod
    def close(err:int=0):
        """ Exit """
        sys.exit(err)

    def exit_on_fail(self, err:int=404):
        """ Prints error message and calls self.close() """
        try:
            details = "Changelog unavailable for %s=%s" % (self.candidate.source_name, self.candidate.source_version_raw)
        except AttributeError:
            details = ""
        print("E: Failed to fetch changelog. %s" % details, file=sys.stderr)
        self.close(err)

    @staticmethod
    def strtobool(val):
        val = val.lower()
        if val in ('y', 'yes'):
            return True
        elif val in ('n', 'no'):
            return False
        else:
            raise ValueError("Invalid response value %s" % val)

    def user_confirm(self, q:str):
        """ returns bool (always False in non-interactive mode) """
        if not self.interactive:
            if _DEBUG: print("Maximum size exceeded, skipping in non-interactive mode")
            return False
        print("%s [y/n] " % q, end="")
        while True:
            try:
                response = self.strtobool(input())
                print("")
                return response
            except ValueError:
                print("Invalid response. Try again [y/n]: ", end="")
            except KeyboardInterrupt:
                pass

    def get_deb_or_tar(self, uri_tar:str=None):
        """ Returns request and URI of the preferred source

        The choice is made based on availability and size. If the .deb is smaller
        than comparison_trigger_size, or if no uri_tar is given, the .deb is
        always selected.
        """
        comparison_trigger_size = 50000
        r_deb = self.check_url(self.candidate.uri, False)
        if r_deb:
            if uri_tar and r_deb.length > comparison_trigger_size:
                # try for .tar.xz
                r_tar = self.check_url(uri_tar, False)
                # validate and compare sizes
                if r_tar and r_tar.length < r_deb.length:
                    _r = r_tar
                    r_deb.close()
                else:
                    _r = r_deb
                    if r_tar:
                        r_tar.close()
            else:
                _r = r_deb
            if (not _r.length > self.max_download_size or
                self.user_confirm(self.max_download_size_msg_lc %
                (_r.length / self.MB))
                ):
                return (_r, _r.url)
        return (False, "")

    def get_changelog_from_filelist(self, filelist:list, local:bool=False):
        """ Returns hopefully the correct "changelog" or an empty string.

        We should not need to be searching because the debian policy says it
        must be at debian/changelog for source packages but not all seem to
        adhere to the policy:
        https://www.debian.org/doc/debian-policy/ch-source.html#debian-changelog-debian-changelog

        """
        files = [s for s in filelist if "changelog" in s.lower()]
        if local:
            testpath = "/usr/share/doc/%s/changelog" % self.candidate.name
            for item in files:
                if item.lower().startswith(testpath):
                    return item
        else:
            testpath = "debian/changelog"
            if testpath in files:
                return testpath
            testpath = "recipe/debian/changelog"
            if testpath in files:
                return testpath
            testpath = "usr/share/doc/%s/changelog" % self.candidate.name
            for item in files:
                if item.lower().startswith(testpath):
                    return item
            # no hits in the standard locations, let's try our luck in
            # random locations at the risk of getting the wrong file
            for item in files:
                if os.path.basename(item).lower().startswith("changelog"):
                    return item
        return None

    def get_apt_changelog_uri(self, uri_template:str):
        """ Returns URI based on provided apt changelog URI template.

        Emulates apt's std::string pkgAcqChangelog::URI
        The template must contain the @CHANGEPATH@ variable, which will
        be expanded to
            COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER
        Component is omitted for releases without one (= flat-style
        repositories).
        """
        source_version = self.candidate.source_version

        def get_kernel_version_from_meta_package(pkg):
            for dependency in pkg.dependencies:
                if not dependency.target_versions or not dependency.rawtype == "Depends":
                    if _DEBUG: print("W: Kernel dependency not found:", dependency)
                    return None
                deppkg = dependency.target_versions[0]
                if deppkg.source_name in ("linux", "linux-signed"):
                    return deppkg.source_version
                if deppkg.source_name.startswith("linux-meta"):
                    _pkg = self.parse_package_metadata(str(deppkg))
                    return get_kernel_version_from_meta_package(_pkg)
            return None

        # Ubuntu kernel meta package workaround
        if self.candidate.origin == "Ubuntu" and \
           self.candidate.source_name.startswith("linux-meta"):
            _source_version = get_kernel_version_from_meta_package(self.candidate)
            if _source_version:
                source_version = _source_version
                self.candidate.source_name = "linux"

        # Ubuntu signed kernel workaround
        if self.candidate.origin == "Ubuntu" and \
           self.candidate.source_name == "linux-signed":
            self.candidate.source_name = "linux"

        # XXX:  Debian does not seem to reliably keep changelogs for previous
        #       (kernel) versions, so should we always look for the latest
        #       version instead on Debian? apt does not do this but the
        #       packages.debian.org website shows the latest version in the
        #       selected archive

        # strip epoch
        if ":" in source_version:
            source_version = source_version.split(":", 1)[1]

        # the path is: COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER, e.g.
        #   main/a/apt/apt_1.1 or contrib/liba/libapt/libapt_2.0
        return uri_template.replace('@CHANGEPATH@',
            "%(component)s%(source_prefix)s/%(source_name)s/%(source_name)s_%(source_version)s" %
            {
                "component": self.candidate.component + "/" if \
                    self.candidate.component and \
                    self.candidate.component != "" else "",
                "source_prefix": self.source_prefix(),
                "source_name": self.candidate.source_name,
                "source_version": source_version
            })
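        # Worked example (the template value is an assumption about Debian's
        # stock configuration, not taken from this excerpt): with
        #   Acquire::Changelogs::URI::Origin::Debian
        #     "https://metadata.ftp-master.debian.org/changelogs/@CHANGEPATH@_changelog"
        # a candidate from "main" with source name "apt" and source version
        # "2.6.1" yields .../changelogs/main/a/apt/apt_2.6.1_changelog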

    def source_prefix(self, source_name:str=None):
        """ Return prefix used for build repository URL """
        if not source_name:
            source_name = self.candidate.source_name
        return source_name[0] if not source_name.startswith("lib") else \
            source_name[:4]
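        # e.g. "apt" -> "a", "libapt-pkg" -> "liba", matching the Debian pool
        # directory layout.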

    def parse_dsc(self, url:str):
        """ Returns filename or None """
        _r = self.check_url(url, False, False)
        if _r:
            target = ""
            lines = _r.text.split("Files:", 1)[1].split(":", 1)[0].split("-----BEGIN", 1)[0].split("\n")
            target = [s.strip() for s in lines if s.strip().lower().endswith('.debian.tar.xz')]
            if not target:
                target = [s.strip() for s in lines if s.strip().lower().endswith('.diff.gz')]
            if not target:
                target = [s.strip() for s in lines if s.strip().lower().endswith('.tar.xz')]
            # don't even test for .tar.gz, it will be too big compared to the .deb
            # if not target:
            #     target = [s.strip() for s in lines if s.strip().lower().endswith('.tar.gz')]
            if target:
                return target[0].split()[-1]
            elif _DEBUG: print(".dsc parse error for", url)
        return None
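        # For reference, the "Files:" block of a .dsc looks roughly like this
        # (illustrative values):
        #   Files:
        #    0123456789abcdef0123456789abcdef 1234 foo_1.0-1.debian.tar.xz
        #    fedcba9876543210fedcba9876543210 98765 foo_1.0.orig.tar.xz
        # so the filename is the last whitespace-separated token of a matching
        # line.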

    def get_changelog_uri(self, base_uri:str):
        """ Tries to find a changelog in files listed in .dsc, locally cached
        packages as well as the remote .deb file

        Returns r and uri
        """
        uri = None
        # XXX:  For APT sources we could just read the apt_pkg.SourceRecords()
        #       directly, if available, which it is not for most users, so
        #       probably not worth it
        target_filename = self.parse_dsc("%s/%s_%s.dsc" % (base_uri, self.candidate.source_name, self.candidate.source_version))
        # get .debian.tar.xz or .diff.gz as a priority as the smallest options
        if (base_uri and target_filename and (
                target_filename.lower().endswith('.debian.tar.xz') or
                target_filename.lower().endswith('.diff.gz')
            )):
            uri = "%s/%s" % (base_uri, target_filename)
            target_filename = None
            r = self.check_url(uri, msg = self.max_download_size_msg)
        else:
            r = None
        if not r:
            # fall back to cached local package
            uri = self.apt_cache_path + self.candidate.filename
            if not os.path.isfile(uri):
                # cache miss, download the full source package or the .deb,
                # depending on size and availability
                if target_filename:
                    uri_tar = "%s/%s" % (base_uri, target_filename)
                else:
                    uri_tar = None
                r, uri = self.get_deb_or_tar(uri_tar)
        return (r, uri)
Example #8
class Transition(dbus.service.Object):
    def __init__(self, conn=None, object_path=None, bus_name=None):
        super().__init__(conn, object_path, bus_name)

        self.dbus_info = None
        self.polkit = None
        self.enforce_polkit = True
        self.cache = Cache()
        self.lock = None
        self.apt_lock = None
    
    @dbus.service.method(
        'org.pop_os.transition_system.Interface', 
        in_signature='', out_signature='b',
        sender_keyword='sender', connection_keyword='conn'
    )
    def obtain_lock(self, sender=None, conn=None):
        """ Lock the package system. """
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        print('Obtaining Package manager lock')
        try:
            self.lock = apt_pkg.get_lock('/var/lib/dpkg/lock-frontend', True)
            self.apt_lock = apt_pkg.get_lock('/var/lib/apt/lists/lock', True)
            print('Lock obtained')
            return True
        except apt_pkg.Error:
            print('Could not obtain lock')
            self.lock = None
            self.apt_lock = None
            return False
    
    @dbus.service.method(
        'org.pop_os.transition_system.Interface', 
        in_signature='', out_signature='b',
        sender_keyword='sender', connection_keyword='conn'
    )
    def release_lock(self, sender=None, conn=None):
        """ Unlock the package system. """
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        print('Releasing package manager lock')
        try:
            os.close(self.lock)
            os.close(self.apt_lock)
            self.lock = None
            self.apt_lock = None
            print('Lock released')
            return True
        except:
            print('Could not release lock')
            return False
    
    @dbus.service.method(
        'org.pop_os.transition_system.Interface', 
        in_signature='', out_signature='b',
        sender_keyword='sender', connection_keyword='conn'
    )
    def open_cache(self, sender=None, conn=None):
        """ Open the package cache. """
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        if self.lock and self.apt_lock:
            print('Opening package cache')
            self.cache.update()
            self.cache.open()
            print('Cache open')
            return True
        print('No lock, cannot open cache')
        return False
    
    @dbus.service.method(
        'org.pop_os.transition_system.Interface', 
        in_signature='', out_signature='b',
        sender_keyword='sender', connection_keyword='conn'
    )
    def commit_changes(self, sender=None, conn=None):
        """ Commit changes to the cache. """
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        if self.lock and self.apt_lock:
            self.cache.commit()
            print('Committed changes to cache')
            return True
        print('No lock, Cannot commit changes')
        return False
    
    @dbus.service.method(
        'org.pop_os.transition_system.Interface', 
        in_signature='', out_signature='b',
        sender_keyword='sender', connection_keyword='conn'
    )
    def close_cache(self, sender=None, conn=None):
        """ Close the package cache. """
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        if self.lock and self.apt_lock:
            self.cache.close()
            print('Package cache closed')
            return True
        print('No lock, cannot close cache')
        return False

    @dbus.service.method(
        'org.pop_os.transition_system.Interface', 
        in_signature='s', out_signature='s',
        sender_keyword='sender', connection_keyword='conn'
    )
    def remove_package(self, package, sender=None, conn=None):
        """ Mark a package for removal."""
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        if self.lock and self.apt_lock:
            print(f'Marking {package} for removal')
            try:
                pkg = self.cache[package]
                pkg.mark_delete()
                return pkg.name
            except:
                print(f'Could not mark {package} for removal')
                return ''
        print('No lock, cannot mark packages')
        return ''

    @dbus.service.method(
        'org.pop_os.transition_system.Interface', 
        in_signature='', out_signature='',
        sender_keyword='sender', connection_keyword='conn'
    )
    def exit(self, sender=None, conn=None):
        if self.lock and self.apt_lock:
            self.close_cache()
            self.release_lock()
        mainloop.quit()

    def _check_polkit_privilege(self, sender, conn, privilege):
        '''Verify that sender has a given PolicyKit privilege.
        sender is the sender's (private) D-Bus name, such as ":1.42"
        (sender_keyword in @dbus.service.method). conn is
        the dbus.Connection object (connection_keyword in
        @dbus.service.method). privilege is the PolicyKit privilege string.
        This method returns if the caller is privileged, and otherwise throws a
        PermissionDeniedByPolicy exception.
        '''

        if sender is None and conn is None:
            # Called locally, not through D-Bus
            return
        
        if not self.enforce_polkit:
            # For testing
            return
        
        if self.dbus_info is None:
            self.dbus_info = dbus.Interface(conn.get_object('org.freedesktop.DBus',
                '/org/freedesktop/DBus/Bus', False), 'org.freedesktop.DBus')
        pid = self.dbus_info.GetConnectionUnixProcessID(sender)
        
        if self.polkit is None:
            self.polkit = dbus.Interface(dbus.SystemBus().get_object(
                'org.freedesktop.PolicyKit1',
                '/org/freedesktop/PolicyKit1/Authority', False),
                'org.freedesktop.PolicyKit1.Authority'
            )
        
        try:
            (is_auth, _, details) = self.polkit.CheckAuthorization(
                ('unix-process', {'pid': dbus.UInt32(pid, variant_level=1),
                'start-time': dbus.UInt64(0, variant_level=1)}), 
                privilege, {'': ''}, dbus.UInt32(1), '', timeout=600
            )

        except dbus.DBusException as e:
            if e._dbus_error_name == 'org.freedesktop.DBus.Error.ServiceUnknown':
                # polkitd timed out, connect again
                self.polkit = None
                return self._check_polkit_privilege(sender, conn, privilege)
            else:
                raise
        
        if not is_auth:
            raise PermissionDeniedByPolicy(privilege)
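
A hedged sketch of how a client could drive this D-Bus service from the system bus; the well-known bus name and object path are assumptions, since the excerpt only shows the interface name:

    import dbus

    bus = dbus.SystemBus()
    proxy = bus.get_object("org.pop_os.transition_system",    # assumed bus name
                           "/org/pop_os/transition_system")   # assumed object path
    iface = dbus.Interface(proxy, "org.pop_os.transition_system.Interface")

    if iface.obtain_lock():
        iface.open_cache()
        iface.remove_package("some-obsolete-package")  # hypothetical package name
        iface.commit_changes()
        iface.close_cache()
        iface.release_lock()
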
Example #9
class _AptChangelog():

    def __init__(self, interactive:bool=False):
        self.interactive = interactive

        # constants
        # apt uses MB rather than MiB, so let's stay consistent
        self.MB = 1000 ** 2
        # downloads larger than this require confirmation or fail
        self.max_download_size_default = 1.5 * self.MB
        self.max_download_size = self.max_download_size_default
        max_download_size_msg_template = "\
To retrieve the full changelog, %s MB have to be downloaded.\n%s\
\n\
Proceed with the download?"
        self.max_download_size_msg_lc = max_download_size_msg_template % ("%.1f",
            "Otherwise we will try to retrieve just the last change.\n")
        self.max_download_size_msg = max_download_size_msg_template % ("%.1f","")
        self.max_download_size_msg_unknown = max_download_size_msg_template % ("an unknown amount of", "")

        self.apt_cache = None
        self.apt_cache_date = None
        self.candidate = None

        # get apt's configuration
        apt_pkg.init_config()
        if apt_pkg.config.exists("Acquire::Changelogs::URI::Origin"):
            self.apt_origins = apt_pkg.config.subtree("Acquire::Changelogs::URI::Origin")
        else:
            self.apt_origins = None
        if apt_pkg.config.exists("Dir::Cache::pkgcache"):
            self.apt_cache_path = apt_pkg.config.find_dir("Dir::Cache")
            self.pkgcache = apt_pkg.config.find_file("Dir::Cache::pkgcache")
        else:
            self.apt_cache = "invalid"
        if (self.apt_cache or
            not os.path.isdir(self.apt_cache_path) or
            not os.path.isfile(self.pkgcache)
            ):
            print("E: Invalid APT configuration found, try to run `apt update` first",
                file=sys.stderr)
            self.close(99)

    def get_cache_date(self):
        if os.path.isfile(self.pkgcache):
            return os.path.getmtime(self.pkgcache)
        return None

    def refresh_cache(self):
        cache_date = self.get_cache_date()

        if not self.apt_cache:
            self.apt_cache = Cache()
            self.apt_cache_date = cache_date
        elif cache_date != self.apt_cache_date:
            self.apt_cache.open(None)
            self.apt_cache_date = cache_date

    def drop_cache(self):
        if self.candidate:
            self.candidate = None
        self.apt_cache = None

    def get_changelog(self, pkg_name:str, no_local:bool=False):
        self.refresh_cache()
        self.candidate = self.parse_package_metadata(pkg_name)

        # parse the package's origin
        if not self.candidate.downloadable:
            origin = "local_package"
        elif self.candidate.origin == "linuxmint":
            origin = "linuxmint"
        elif self.candidate.origin.startswith("LP-PPA-"):
            origin = "LP-PPA"
        elif self.apt_origins and self.candidate.origin in self.apt_origins.list():
            origin = "APT"
        else:
            origin = "unsupported"

        # Check for changelog of installed package first
        has_local_changelog = False
        uri = None
        if not no_local and self.candidate.is_installed:
            if _DEBUG: print("Package is installed...")
            uri = self.get_changelog_from_filelist(
                self.candidate.installed_files, local=True)
            # Ubuntu kernel workarounds
            if self.candidate.origin == "Ubuntu":
                if self.candidate.source_name == "linux-signed":
                    uri = uri.replace("linux-image","linux-modules")
                if self.candidate.source_name == "linux-meta":
                    uri = None
            if uri and not os.path.isfile(uri):
                uri = None

        # Do nothing if local changelog exists
        if uri:
            has_local_changelog = True
        # all origins that APT supports
        elif origin == 'APT':
            uri = self.get_apt_changelog_uri(
                self.apt_origins.get(self.candidate.origin))
            r = self.check_url(uri)
            if not r:
                self.exit_on_fail(2)
        # Linux Mint repo
        elif origin == 'linuxmint':
            # Mint repos don't have .debian.tar.xz files, only full packages, so
            # check the package cache first
            base_uri, _ = os.path.split(self.candidate.uri)
            r, uri = self.get_changelog_uri(base_uri)
            if not r:
                # fall back to last change info for the source package
                # Mint's naming scheme seems to be using amd64 unless source
                # is i386 only, we always check amd64 first
                base_uri = "http://packages.linuxmint.com/dev/%s_%s_%s.changes"
                uri = base_uri % (self.candidate.source_name,
                    self.candidate.source_version, "amd64")
                r = self.check_url(uri, False)
                if not r:
                    uri = base_uri % (self.candidate.source_name,
                        self.candidate.source_version, "i386")
                    r = self.check_url(uri, False)
                    if not r:
                        self.exit_on_fail(3)

        # Launchpad PPA
        elif origin == 'LP-PPA':
            ppa_owner, ppa_name, _ = \
                self.candidate.uri.split("ppa.launchpad.net/")[1].split("/", 2)
            base_uri = "http://ppa.launchpad.net/%(owner)s/%(name)s/ubuntu/pool/main/%(source_prefix)s/%(source_name)s" % \
                {
                    "owner": ppa_owner,
                    "name": ppa_name,
                    "source_prefix": self.source_prefix(),
                    "source_name": self.candidate.source_name
                }

            r, uri = self.get_changelog_uri(base_uri)
            if not r:
                # fall back to last change info only
                uri = "https://launchpad.net/~%(owner)s/+archive/ubuntu/%(name)s/+files/%(source_name)s_%(source_version)s_source.changes" % \
                    {
                        "owner" : ppa_owner,
                        "name": ppa_name,
                        "source_name": self.candidate.source_name,
                        "source_version": self.candidate.source_version
                    }
                r = self.check_url(uri, False)
                if not r:
                    self.exit_on_fail(4)
        # Not supported origin
        elif origin == 'unsupported':
            if _DEBUG: print("Unsupported Package")
            base_uri, _ = os.path.split(self.candidate.uri)
            r, uri = self.get_changelog_uri(base_uri)
            if not r:
                self.exit_on_fail(5)
        # Locally installed package without local changelog or remote
        # source, hope it's cached and contains a changelog
        elif origin == 'local_package':
            uri = self.apt_cache_path + self.candidate.filename
            if not os.path.isfile(uri):
                self.exit_on_fail(6)

        # Changelog downloading, extracting and processing:
        changelog = ""
        # local changelog
        if has_local_changelog and not no_local:
            if _DEBUG: print("Using local changelog:",uri)
            try:
                filename = os.path.basename(uri)
                # determine file type by name/extension
                # as per debian policy 4.4 the encoding must be UTF-8
                # as per policy 12.7 the name must be changelog.Debian.gz or
                # changelog.gz (deprecated)
                if filename.lower().endswith('.gz'):
                    changelog = gzip.open(uri,'r').read().decode('utf-8')
                elif filename.lower().endswith('.xz'):
                    # just in case / future proofing
                    changelog = lzma.open(uri,'r').read().decode('utf-8')
                elif filename.lower() == 'changelog':
                    changelog = open(uri, 'r').read().encode().decode('utf-8')
                else:
                    raise ValueError('Unknown changelog format')
            except Exception as e:
                _generic_exception_handler(e)
                self.exit_on_fail(1)
        # APT-format changelog, download directly
        # - unfortunately this is slow since the servers support no compression
        elif origin == "APT":
            if _DEBUG: print("Downloading: %s (%.2f MB)" % (uri, r.length / self.MB))
            changelog = r.text
            r.close()
        # last change changelog, download directly
        elif uri.endswith('.changes'):
            if _DEBUG: print("Downloading: %s (%.2f MB)" % (uri, r.length / self.MB))
            changes = r.text.split("Changes:")[1].split("Checksums")[0].split("\n")
            r.close()
            for change in changes:
                change = change.strip()
                if change:
                    if change == ".":
                        change = ""
                    changelog += change + "\n"
        # compressed binary source, download and extract changelog
        else:
            source_is_cache = uri.startswith(self.apt_cache_path)
            if _DEBUG: print("Using cached package:" if source_is_cache else
                "Downloading: %s (%.2f MB)" % (uri, r.length / self.MB))
            try:
                if not source_is_cache:
                    # download stream to temporary file
                    tmpFile = tempfile.NamedTemporaryFile(prefix="apt-changelog-")
                    if self.interactive and r.length:
                        # download chunks with progress indicator
                        recv_length = 0
                        blocks = 60
                        for data in r.iter_content(chunk_size=16384):
                            recv_length += len(data)
                            tmpFile.write(data)
                            recv_pct = recv_length / r.length
                            recv_blocks = int(blocks * recv_pct)
                            print("\r[%(progress)s%(spacer)s] %(percentage).1f%%" %
                                {
                                    "progress": "=" * recv_blocks,
                                    "spacer":  " " * (blocks - recv_blocks),
                                    "percentage": recv_pct * 100
                                }, end="", flush=True)
                        # clear progress bar when done
                        print("\r" + " " * (blocks + 10), end="\r", flush=True)
                    else:
                        # no content-length or non-interactive, download in one go
                        # up to the configured max_download_size, ask only when
                        # exceeded
                        r.raw.decode_content = True
                        size = 0
                        size_exceeded = False
                        while True:
                            buf = r.raw.read(16*1024)
                            if not size_exceeded:
                                size += len(buf)
                                if size > self.max_download_size:
                                    if not self.user_confirm(self.max_download_size_msg_unknown):
                                        r.close()
                                        tmpFile.close()
                                        return ""
                                    else:
                                        size_exceeded = True
                            if not buf:
                                break
                            tmpFile.write(buf)
                    r.close()
                    tmpFile.seek(0)
                if uri.endswith(".deb"):
                    # process .deb file
                    if source_is_cache:
                        f = uri
                    else:
                        f = tmpFile.name
                        # We could copy the downloaded .deb files to the apt
                        # cache here but then we'd need to run the script elevated:
                        # shutil.copy(f, self.apt_cache_path + os.path.basename(uri))
                    deb = DebPackage(f)
                    changelog_file = self.get_changelog_from_filelist(deb.filelist)
                    if changelog_file:
                        changelog = deb.data_content(changelog_file)
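                        # data_content() prefixes decompressed data with
                        # "Automatically decompressed:\n\n" (29 characters),
                        # so strip that marker below if present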
                        if changelog.startswith('Automatically decompressed:'):
                            changelog = changelog[29:]
                    else:
                        raise ValueError('Malformed Debian package')
                elif uri.endswith(".diff.gz"):
                    # Ubuntu partner repo has .diff.gz files,
                    # we can extract a changelog from that
                    data = gzip.open(tmpFile.name, "r").read().decode('utf-8')
                    additions = data.split("+++")
                    for addition in additions:
                        lines = addition.split("\n")
                        if "/debian/changelog" in lines[0]:
                            for line in lines[2:]:
                                if line.startswith("+"):
                                    changelog += "%s\n" % line[1:]
                                else:
                                    break
                    if not changelog:
                        raise ValueError('No changelog in .diff.gz')
                else:
                    # process .tar.xz file
                    with tarfile.open(fileobj=tmpFile, mode="r:xz") as tar:
                        changelog_file = self.get_changelog_from_filelist(
                            [s.name for s in tar.getmembers() if s.type in (b"0", b"2")])
                        if changelog_file:
                            changelog = tar.extractfile(changelog_file).read().decode()
                        else:
                            raise ValueError('No changelog in source package')
            except Exception as e:
                _generic_exception_handler(e)
                self.exit_on_fail(520)
            if 'tmpFile' in vars():
                try:
                    tmpFile.close()
                except Exception as e:
                    _generic_exception_handler(e)

        # ALL DONE
        return changelog

    def parse_package_metadata(self, pkg_name:str):
        """ Creates the self.candidate object based on package name=version/release

        Wildcard matching is only used for version and release, and only the
        first match is processed.
        """
        # parse =version declaration
        if "=" in pkg_name:
            (pkg_name, pkg_version) = pkg_name.split("=", 1)
            pkg_release = None
        # parse /release declaration (only if no version specified)
        elif "/" in pkg_name:
            (pkg_name, pkg_release) = pkg_name.split("/", 1)
            pkg_version = None
        else:
            pkg_version = None
            pkg_release = None

        # check if pkg_name exists
        # unlike apt no pattern matching, a single exact match only
        if pkg_name in self.apt_cache:
            pkg = self.apt_cache[pkg_name]
        else:
            print("E: Unable to locate package %s" % pkg_name, file=sys.stderr)
            self.close(13)

        # get package data
        _candidate = None
        candidate = None
        if pkg_release or pkg_version:
            match_found = False
            for _pkg in pkg.versions:
                if pkg_version:
                    if fnmatch.fnmatch(_pkg.version, pkg_version):
                        match_found = True
                else:
                    for _origin in _pkg.origins:
                        if fnmatch.fnmatch(_origin.archive, pkg_release):
                            match_found = True
                if match_found:
                    _candidate = _pkg
                    break
            if not match_found:
                if pkg_release:
                    print('E: Release "%s" is unavailable for "%s"' %
                        (pkg_release, pkg.name), file=sys.stderr)
                else:
                    print('E: Version "%s" is unavailable for "%s"' %
                        (pkg_version, pkg.name), file=sys.stderr)
                self.close(14)
        else:
            _candidate = pkg.candidate
        candidate = _Package(
            version = _candidate.version,
            name = _candidate.package.name,
            fullname = None,
            architecture = pkg.architecture,
            source_name = _candidate.source_name,
            source_version = _candidate.source_version,
            uri = _candidate.uri,
            filename = os.path.basename(_candidate.filename),
            origin = _candidate.origins[0].origin,
            component = _candidate.origins[0].component,
            downloadable = _candidate.downloadable,
            is_installed = _candidate.is_installed,
            dependencies = _candidate.dependencies
        )
        if candidate.is_installed:
            candidate.installed_files = pkg.installed_files
        candidate.source_version_raw = candidate.source_version
        if ":" in candidate.source_version:
            candidate.source_version = candidate.source_version.split(":", 1)[1]
        return candidate

    def check_url(self, url:str, check_size:bool=True, stream:bool=True,
        msg:str=None):
        """ True if url can be downloaded and fits size requirements """
        if _DEBUG: print("Checking:", url)
        try:
            _r = requests.get(url, stream=stream, timeout=5)
        except Exception as e:
            _generic_exception_handler(e)
        else:
            if _r:
                length = _r.headers.get("Content-Length")
                if length:
                    _r.length = int(length)
                else:
                    _r.length = 0
                if (not check_size or not
                    (check_size and _r.length > self.max_download_size and not
                    self.user_confirm(
                        (self.max_download_size_msg_lc if not msg else msg) %
                        (_r.length / self.MB))
                    )):
                    return _r
        if '_r' in vars():
            _r.close()
        return False

    @staticmethod
    def close(err:int=0):
        """ Exit """
        sys.exit(err)

    def exit_on_fail(self, err:int=404):
        """ Prints error message and calls self.close() """
        try:
            details = "Changelog unavailable for %s=%s" %\
                (self.candidate.source_name, self.candidate.source_version_raw)
        except AttributeError:
            details = ""
        print("E: Failed to fetch changelog. %s" % details, file=sys.stderr)
        self.close(err)

    @staticmethod
    def strtobool(val):
        val = val.lower()
        if val in ('y', 'yes'):
            return True
        elif val in ('n', 'no'):
            return False
        else:
            raise ValueError("Invalid response value %r" % (val,))

    def user_confirm(self, q:str):
        """ returns bool (always False in non-interactive mode) """
        if not self.interactive:
            if _DEBUG: print("Maximum size exceeded, skipping in non-interactive mode")
            return False
        print('%s [y/n] ' % q, end='')
        while True:
            try:
                response = self.strtobool(input())
                print("")
                return response
            except ValueError:
                print('Invalid response. Try again [y/n]: ', end='')
            except KeyboardInterrupt:
                pass

    def get_deb_or_tar(self, uri_tar:str=None):
        """ Returns request and URI of the preferred source

        The choice is made based on availability and size. If the .deb is smaller
        than comparison_trigger_size, or if no uri_tar is given, the .deb is
        always selected.
        """
        comparison_trigger_size = 50000
        r_deb = self.check_url(self.candidate.uri, False)
        if r_deb:
            if uri_tar and r_deb.length > comparison_trigger_size:
                # try for .tar.xz
                r_tar = self.check_url(uri_tar, False)
                # validate and compare sizes
                if r_tar and r_tar.length < r_deb.length:
                    _r = r_tar
                    r_deb.close()
                else:
                    _r = r_deb
                    if r_tar:
                        r_tar.close()
            else:
                _r = r_deb
            if (not _r.length > self.max_download_size or
                self.user_confirm(self.max_download_size_msg_lc %
                (_r.length / self.MB))
                ):
                return (_r, _r.url)
        return (False, "")

    def get_changelog_from_filelist(self, filelist:list, local:bool=False):
        """ Returns hopefully the correct "changelog" or an empty string.

        We should not need to be searching because the debian policy says it
        must be at debian/changelog for source packages but not all seem to
        adhere to the policy:
        https://www.debian.org/doc/debian-policy/ch-source.html#debian-changelog-debian-changelog

        """
        files = [s for s in filelist if "changelog" in s.lower()]
        if local:
            testpath = '/usr/share/doc/%s/changelog' % self.candidate.name
            for item in files:
                if item.lower().startswith(testpath):
                    return item
        else:
            testpath = 'debian/changelog'
            if testpath in files:
                return testpath
            testpath = 'recipe/debian/changelog'
            if testpath in files:
                return testpath
            testpath = 'usr/share/doc/%s/changelog' % self.candidate.name
            for item in files:
                if item.lower().startswith(testpath):
                    return item
            # no hits in the standard locations, let's try our luck in
            # random locations at the risk of getting the wrong file
            for item in files:
                if os.path.basename(item).lower().startswith("changelog"):
                    return item
        return None

    def get_apt_changelog_uri(self, uri_template:str):
        """ Returns URI based on provided apt changelog URI template.

        Emulates apt's std::string pkgAcqChangelog::URI
        The template must contain the @CHANGEPATH@ variable, which will
        be expanded to
            COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER
        Component is omitted for releases without one (= flat-style
        repositories).
        """
        source_version = self.candidate.source_version

        def get_kernel_version_from_meta_package(pkg):
            for dependency in pkg.dependencies:
                if not dependency.target_versions:
                    if _DEBUG: print("W: Kernel dependency not found:", dependency)
                    return None
                deppkg = dependency.target_versions[0]
                if deppkg.source_name in ("linux", "linux-signed"):
                    return deppkg.source_version
                if deppkg.source_name == "linux-meta":
                    _pkg = self.parse_package_metadata(str(deppkg))
                    return get_kernel_version_from_meta_package(_pkg)
            return None

        # Ubuntu kernel meta package workaround
        if (self.candidate.origin == "Ubuntu" and
            self.candidate.source_name == "linux-meta"
            ):
            _source_version = get_kernel_version_from_meta_package(self.candidate)
            if _source_version:
                source_version = _source_version
                self.candidate.source_name = "linux"

        # Ubuntu signed kernel workaround
        if (self.candidate.origin == "Ubuntu" and
            self.candidate.source_name == "linux-signed"
            ):
            self.candidate.source_name = "linux"

        # XXX:  Debian does not seem to reliably keep changelogs for previous
        #       (kernel) versions, so should we always look for the latest
        #       version instead on Debian? apt does not do this but the
        #       packages.debian.org website shows the latest version in the
        #       selected archive

        # strip epoch
        if ":" in source_version:
            source_version = source_version.split(":", 1)[1]

        # the path is: COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER, e.g.
        #   main/a/apt/apt_1.1 or contrib/liba/libapt/libapt_2.0
        return uri_template.replace('@CHANGEPATH@',
            "%(component)s%(source_prefix)s/%(source_name)s/%(source_name)s_%(source_version)s" %
            {
                "component": self.candidate.component + "/" if \
                    self.candidate.component and \
                    self.candidate.component != "" else "",
                "source_prefix": self.source_prefix(),
                "source_name": self.candidate.source_name,
                "source_version": source_version
            })

    def source_prefix(self, source_name:str=None):
        """ Return prefix used for build repository URL """
        if not source_name:
            source_name = self.candidate.source_name
        return source_name[0] if not source_name.startswith("lib") else \
            source_name[:4]

    def parse_dsc(self, url:str):
        """ Returns filename or None """
        _r = self.check_url(url, False, False)
        if _r:
            target = ""
            lines = _r.text.split("Files:", 1)[1].split(":", 1)[0].split("-----BEGIN", 1)[0].split("\n")
            target = [s.strip() for s in lines if s.strip().lower().endswith('.debian.tar.xz')]
            if not target:
                target = [s.strip() for s in lines if s.strip().lower().endswith('.diff.gz')]
            if not target:
                target = [s.strip() for s in lines if s.strip().lower().endswith('.tar.xz')]
            # don't even test for .tar.gz, it will be too big compared to the .deb
            # if not target:
            #     target = [s.strip() for s in lines if s.strip().lower().endswith('.tar.gz')]
            if target:
                return target[0].split()[-1]
            elif _DEBUG: print(".dsc parse error for", url)
        return None

    def get_changelog_uri(self, base_uri:str):
        """ Tries to find a changelog in files listed in .dsc, locally cached
        packages as well as the remote .deb file

        Returns r and uri
        """
        uri = None
        # XXX:  For APT sources we could just read the apt_pkg.SourceRecords()
        #       directly, if available, which it is not for most users, so
        #       probably not worth it
        target_filename = self.parse_dsc("%s/%s_%s.dsc" %
            (base_uri, self.candidate.source_name, self.candidate.source_version))
        # get .debian.tar.xz or .diff.gz as a priority as the smallest options
        if (base_uri and target_filename and (
                target_filename.lower().endswith('.debian.tar.xz') or
                target_filename.lower().endswith('.diff.gz')
            )):
            uri = "%s/%s" % (base_uri, target_filename)
            target_filename = None
            r = self.check_url(uri, msg = self.max_download_size_msg)
        else:
            r = None
        if not r:
            # fall back to cached local package
            uri = self.apt_cache_path + self.candidate.filename
            if not os.path.isfile(uri):
                # cache miss, download the full source package or the .deb,
                # depending on size and availability
                if target_filename:
                    uri_tar = "%s/%s" % (base_uri, target_filename)
                else:
                    uri_tar = None
                r, uri = self.get_deb_or_tar(uri_tar)
        return (r, uri)
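
# Hedged standalone sketch (not part of the original sources above): the
# @CHANGEPATH@ expansion that get_apt_changelog_uri() performs, shown in
# isolation. The template URL and package values below are illustrative
# assumptions, not taken from the code above.
def expand_changepath(template, component, source_name, source_version):
    # apt changelog paths never contain the epoch, so strip it
    if ":" in source_version:
        source_version = source_version.split(":", 1)[1]
    # pool prefix: first letter of the source, or first four letters for "lib*"
    prefix = source_name[:4] if source_name.startswith("lib") else source_name[0]
    # COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER; component is omitted for flat repos
    path = "%s%s/%s/%s_%s" % (
        component + "/" if component else "",
        prefix, source_name, source_name, source_version)
    return template.replace("@CHANGEPATH@", path)

# expand_changepath("http://changelogs.ubuntu.com/changelogs/pool/@CHANGEPATH@/changelog",
#                   "main", "apt", "2:2.4.11")
# -> "http://changelogs.ubuntu.com/changelogs/pool/main/a/apt/apt_2.4.11/changelog"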
Beispiel #10
0
    def do_install(self, to_install, langpacks=False):
        self.nested_progress_start()

        if langpacks:
            self.db.progress('START', 0, 10, 'ubiquity/langpacks/title')
        else:
            self.db.progress('START', 0, 10, 'ubiquity/install/title')
        self.db.progress('INFO', 'ubiquity/install/find_installables')

        self.progress_region(0, 1)
        fetchprogress = DebconfAcquireProgress(
            self.db, 'ubiquity/install/title',
            'ubiquity/install/apt_indices_starting',
            'ubiquity/install/apt_indices')
        cache = Cache()

        if cache._depcache.broken_count > 0:
            syslog.syslog(
                'not installing additional packages, since there are broken '
                'packages: %s' % ', '.join(broken_packages(cache)))
            self.db.progress('STOP')
            self.nested_progress_end()
            return

        with cache.actiongroup():
            for pkg in to_install:
                mark_install(cache, pkg)

        self.db.progress('SET', 1)
        self.progress_region(1, 10)
        if langpacks:
            fetchprogress = DebconfAcquireProgress(
                self.db, 'ubiquity/langpacks/title', None,
                'ubiquity/langpacks/packages')
            installprogress = DebconfInstallProgress(
                self.db, 'ubiquity/langpacks/title',
                'ubiquity/install/apt_info')
        else:
            fetchprogress = DebconfAcquireProgress(
                self.db, 'ubiquity/install/title', None,
                'ubiquity/install/fetch_remove')
            installprogress = DebconfInstallProgress(
                self.db, 'ubiquity/install/title',
                'ubiquity/install/apt_info',
                'ubiquity/install/apt_error_install')
        chroot_setup(self.target)
        commit_error = None
        try:
            try:
                if not self.commit_with_verify(cache,
                                               fetchprogress, installprogress):
                    fetchprogress.stop()
                    installprogress.finish_update()
                    self.db.progress('STOP')
                    self.nested_progress_end()
                    return
            except IOError:
                for line in traceback.format_exc().split('\n'):
                    syslog.syslog(syslog.LOG_ERR, line)
                fetchprogress.stop()
                installprogress.finish_update()
                self.db.progress('STOP')
                self.nested_progress_end()
                return
            except SystemError as e:
                for line in traceback.format_exc().split('\n'):
                    syslog.syslog(syslog.LOG_ERR, line)
                commit_error = str(e)
        finally:
            chroot_cleanup(self.target)
        self.db.progress('SET', 10)

        cache.open(None)
        if commit_error or cache._depcache.broken_count > 0:
            if commit_error is None:
                commit_error = ''
            brokenpkgs = broken_packages(cache)
            self.warn_broken_packages(brokenpkgs, commit_error)

        self.db.progress('STOP')

        self.nested_progress_end()
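
# Hedged sketch (not from the original source): do_install() above relies on a
# broken_packages() helper that is not shown here. A plausible minimal version
# using only public python-apt attributes could look like this.
def broken_packages(cache):
    """Return the names of packages whose marked install state is broken."""
    return sorted(pkg.name for pkg in cache if pkg.is_inst_broken)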
Beispiel #11
0
class UpdateMaker():
    """A class for making updates for OreSat Linux Updater daemon"""
    def __init__(self, board: str):
        """
        Parameters
        ----------
        board: str
            The board to make the update for.
        """
        self._board = board
        self._status_file = ""
        self._cache = Cache(rootdir=ROOT_DIR)
        self._deb_pkgs = []
        self._inst_list = []
        self._not_installed_yet_list = []
        self._not_removed_yet_list = []

        print("updating cache")
        self._cache.update(raise_on_error=False)
        self._cache.open()

        # copying the context of the real root apt source.list file into the local one
        if not isfile(OLU_APT_SOURCES_FILE) or stat(OLU_APT_SOURCES_FILE).st_size == 0:
            copyfile(SYSTEM_APT_SOURCES_FILE, OLU_APT_SOURCES_FILE)

            # adding OreSat Debian apt repo
            with open(OLU_APT_SOURCES_FILE, "a") as f:
                f.write(
                    "deb [trusted=yes] https://debian.oresat.org/packages ./")

        # copying the apt repo signatures
        if len(listdir(OLU_SIGNATURES_DIR)) == 3:
            for root, dirs, files in walk(SYSTEM_SIGNATURES_DIR):
                for file in files:
                    if file != "lock":
                        copyfile(SYSTEM_SIGNATURES_DIR + file,
                                 OLU_SIGNATURES_DIR + file)

        # clear download dir
        for i in listdir(DOWNLOAD_DIR):
            if i.endswith(".deb"):
                remove(DOWNLOAD_DIR + i)

        status_files = []
        for i in listdir(STATUS_CACHE_DIR):
            status_files.append(OLMFile(load=i))
        status_files.sort()

        # find latest olu status tar file
        for i in status_files:
            if i.board == board:
                self._status_file = STATUS_CACHE_DIR + i.name
                break

        if self._status_file == "":
            msg = "No status file for {} board in cache".format(board)
            raise FileNotFoundError(msg)

        # update status file
        dpkg_data = read_dpkg_status_file(self._status_file)
        with open(DPKG_STATUS_FILE, "w") as fptr:
            fptr.write(dpkg_data)

        # dealing with update files that are not installed yet
        olu_status_data = read_olu_status_file(self._status_file)
        for file in literal_eval(olu_status_data):
            with tarfile.open(UPDATE_CACHE_DIR + file, "r") as tar:
                with tar.extractfile("instructions.txt") as instructions:
                    for i in json.loads(instructions.read()):
                        if i["type"] == "DPKG_INSTALL":
                            for pkg in i["items"]:
                                pkg_obj = self._cache[pkg.split('_')[0]]
                                pkg_obj.mark_install()
                            self._not_installed_yet_list.extend(i["items"])
                        elif i["type"] == "DPKG_REMOVE" or i[
                                "type"] == "DPKG_PURGE":
                            self._not_removed_yet_list.extend(i["items"])

    @property
    def not_installed_yet(self) -> list:
        return [pkg.split('_')[0] for pkg in self._not_installed_yet_list]

    @property
    def not_removed_yet(self) -> list:
        return [pkg.split('_')[0] for pkg in self._not_removed_yet_list]

    def add_packages(self, packages: list, reinstall_not_installed: list,
                     reinstall_not_removed: list):
        """Add deb packages to be installed.

        Parameters
        ----------
        packages: list
            A list of deb packages to install on the board.
        reinstall_not_installed: list
            Packages from the not-installed-yet list that should be marked for
            install again.
        reinstall_not_removed: list
            Packages from the not-removed-yet list that should be reinstalled
            instead of removed.
        """

        if packages == []:
            raise ValueError("Requires a list of packages to install")

        inst_deb_pkgs = []

        for pkg in packages:
            pkg_obj = self._cache[pkg]

            # checking the not yet installed and removed packages
            if pkg_obj.name in reinstall_not_removed:
                pkg_index = self.not_removed_yet.index(pkg_obj.name)
                self._not_removed_yet_list.pop(pkg_index)
                pkg_obj.mark_install()
            elif pkg_obj.name in reinstall_not_installed:
                pkg_index = self.not_installed_yet.index(pkg_obj.name)
                self._not_installed_yet_list.pop(pkg_index)
                pkg_obj.mark_install()
            elif pkg_obj.name not in self.not_installed_yet and pkg_obj.name not in self.not_removed_yet:
                pkg_obj.mark_install()

            # find new packages (dependencies) that are marked
            for deb_pkg in self._cache:
                if deb_pkg.marked_install and \
                        deb_pkg.name not in self._deb_pkgs:
                    self._deb_pkgs.append(deb_pkg.name)
                    inst_deb_pkgs.append(deb_pkg.name)

        new_inst = Instruction(InstructionType.DPKG_INSTALL, inst_deb_pkgs)
        self._inst_list.append(new_inst)

    def remove_packages(self, packages: list):
        """Remove deb packages on board.

        Parameters
        ----------
        packages: list
            A list of deb packages to remove on the board.
        """

        if packages == []:
            raise ValueError("Requires a list of packages to remove")

        new_inst = Instruction(InstructionType.DPKG_REMOVE, packages)
        self._inst_list.append(new_inst)

    def purge_packages(self, packages: list):
        """Purge deb packages on board.

        Parameters
        ----------
        packages: list
            A list of deb packages to remove on the board.
        """

        if packages == []:
            raise ValueError("Requires a list of packages to remove")

        new_inst = Instruction(InstructionType.DPKG_PURGE, packages)
        self._inst_list.append(new_inst)

    def add_bash_scripts(self, bash_scripts: list):
        """Add bash scripts to run on the board.

        Parameters
        ----------
        bash_scripts: list
            A list of bash scripts to run on the board.
        """

        if bash_scripts == []:
            raise ValueError("Requires a list of bash scripts to run")

        new_inst = Instruction(InstructionType.BASH_SCRIPT, bash_scripts)
        self._inst_list.append(new_inst)

    def add_support_files(self, support_files: list):
        """Add a support files to update archive.

        Parameters
        ----------
        support_files: list
            A list of support files to add to the update.
        """

        for s_file in support_files:
            if not isfile(s_file):
                raise ValueError("{} was not found".format(s_file))

        new_inst = Instruction(InstructionType.SUPPORT_FILE, support_files)
        self._inst_list.append(new_inst)

    def status(self):
        """Print the contexts of instructions list"""

        for i in self._inst_list:
            print(i)

    def make_update_archive(self):
        """Make the update archive"""

        # download deb files
        self._cache.fetch_archives()

        # replace package name with deb filepath in instruction obj
        for inst in self._inst_list:
            if not inst.type == InstructionType.DPKG_INSTALL:
                continue

            for i in range(len(inst.items)):
                found = False
                for deb_file in listdir(DOWNLOAD_DIR):
                    if not deb_file.endswith(".deb"):
                        continue

                    if deb_file.startswith(inst.items[i] + "_"):
                        inst.items[i] = DOWNLOAD_DIR + deb_file
                        found = True
                        break

                if found is False:
                    break

        print("Making tar")

        update_file = create_update_archive(self._board, self._inst_list, "./")

        print("{} was made".format(update_file))

        # option to copy the generated update to the update cache
        command = input("-> Save copy to update cache [Y/n]: ")

        if command in ("Y", "y", "yes"):
            try:
                copyfile(update_file, UPDATE_CACHE_DIR + basename(update_file))
            except OSError:
                print("An error occurred saving the copy to update cache")
            else:
                print("{} was added to update cache".format(update_file))