def remove(*package_names) -> None:
    """Remove (purge) the given packages from the system.

    Runs a full system upgrade first, then marks each named package for
    deletion (with its configuration files) and commits the change.
    Unknown or not-installed package names are reported and skipped.
    """
    upgrade()
    cache = Cache()
    cache.update()
    cache.open()
    try:
        for name in package_names:
            if name not in cache:
                print('Package %s not found!' % (name, ))
                continue
            package = cache[name]
            if not package.is_installed:
                print('Package %s is not installed!' % (name, ))
                continue
            # purge=True also deletes the package's configuration files.
            package.mark_delete(purge=True)
        cache.commit(TextFetchProgress(), InstallProgress())
    finally:
        # Fix: always release the cache, even when commit() raises;
        # the original leaked the open cache on error.
        cache.close()
def install(*package_names) -> None:
    """Install the given packages.

    Runs a full system upgrade first, then marks each named package for
    installation and commits the change.  Unknown or already-installed
    package names are logged and skipped.
    """
    upgrade()
    cache = Cache()
    cache.update()
    cache.open()
    try:
        for name in package_names:
            if name not in cache:
                logger.error('Package %s not found!' % (name, ))
                continue
            package = cache[name]
            if package.is_installed:
                logger.warning('Package %s already installed!' % (name, ))
                continue
            package.mark_install()
        cache.commit(TextFetchProgress(), InstallProgress())
    finally:
        # Fix: always release the cache, even when commit() raises;
        # the original leaked the open cache on error.
        cache.close()
def upgrade() -> None:
    """Upgrade all installed packages (dist-upgrade) and fix broken deps."""
    cache = Cache()
    # Fix: the original called update()/open() twice back-to-back;
    # one refresh of the package lists is sufficient.
    cache.update()
    cache.open()
    try:
        cache.upgrade(dist_upgrade=True)
        cache.fix_broken()
        cache.commit(TextFetchProgress(), InstallProgress())
    finally:
        # Fix: always release the cache, even when commit() raises.
        cache.close()
def do_update(mark_only):
    """Upgrade all NVIDIA packages, temporarily lifting any apt holds.

    Parameters
    ----------
    mark_only:
        When true, only the side effects of exiting the unhold() context
        manager are wanted; no package update is performed.

    Returns
    -------
    bool
        True when an installed package version changed (a kernel module
        was upgraded), False otherwise.
    """
    _, progress = query_verbosity()
    log.info("Getting list of eligible packages...")
    cache = Cache(progress)
    f_cache = FilteredCache(cache)
    f_cache.set_filter(NvidiaFilter())
    names = f_cache.keys()
    with unhold(names, cache):
        # mark_only means we just want the side-effects of exiting the
        # unhold() context manager.
        if mark_only:
            return False
        log.info("Updating package list...")
        try:
            cache.update()
        # Fix: `except FetchFailedException, err` is Python-2-only
        # syntax; the `as` form is valid on Python 2.6+ and 3.
        except FetchFailedException as err:
            # Fix: Logger.warn is a deprecated alias of Logger.warning.
            log.warning(err)
        cache.open(progress)  # Refresh package list
        old_versions = {name: cache[name].installed for name in names}
        log.info("Updating all packages...")
        for name in names:
            if cache[name].is_upgradable:
                cache[name].mark_upgrade()
        cache.commit(None, None)
        log.info("Refreshing package cache...")
        cache.open(progress)
        new_versions = {name: cache[name].installed for name in names}
        log.info("Checking whether packages were upgraded...")
        for name in old_versions:
            if old_versions[name] != new_versions[name]:
                log.info("Kernel module changed")
                return True
        return False
class UpdateMaker():
    """A class for making updates for OreSat Linux Updater daemon"""

    def __init__(self, board: str):
        """
        Parameters
        ----------
        board: str
            The board to make the update for.

        Raises
        ------
        FileNotFoundError
            No status file for the board exists in the status cache.
        """

        self._board = board  # Fix: was assigned twice in the original.
        self._status_file = ""
        self._cache = Cache(rootdir=ROOT_DIR)
        self._cache.update(raise_on_error=False)
        self._cache.open()
        self._deb_pkgs = []   # names of all debs already queued for install
        self._inst_list = []  # ordered Instruction objects for the update

        # make sure all dir exist
        Path(OLU_DIR).mkdir(parents=True, exist_ok=True)
        Path(ROOT_DIR).mkdir(parents=True, exist_ok=True)
        Path(DOWNLOAD_DIR).mkdir(parents=True, exist_ok=True)
        Path(UPDATE_CACHE_DIR).mkdir(parents=True, exist_ok=True)
        Path(STATUS_CACHE_DIR).mkdir(parents=True, exist_ok=True)

        # clear download dir
        for i in listdir(DOWNLOAD_DIR):
            if i.endswith(".deb"):
                remove(DOWNLOAD_DIR + i)

        status_files = []
        for i in listdir(STATUS_CACHE_DIR):
            status_files.append(OLMFile(load=i))
        status_files.sort()

        # find latest olu status tar file
        for i in status_files:
            # Fix: compare the file's board field against the requested
            # board; the original compared the file *name* to the board,
            # which never matches a real status-file name.
            if i.board == board:
                self._status_file = STATUS_CACHE_DIR + i.name
                break

        if self._status_file == "":
            msg = "No status file for {} board in cache".format(board)
            raise FileNotFoundError(msg)

        # update status file
        dpkg_data = read_dpkg_status_file(self._status_file)
        # Fix: open for writing; the original used the default "r" mode,
        # so the write() call would raise io.UnsupportedOperation.
        with open(DPKG_STATUS_FILE, "w") as fptr:
            fptr.write(dpkg_data)

        # TODO deal with update files that are not installed yet.

    def add_packages(self, packages: list):
        """Add deb packages to be installed.

        Parameters
        ----------
        packages: list
            A list of deb packages to install on the board.

        Raises
        ------
        ValueError
            The package list was empty.
        """
        if packages == []:
            raise ValueError("Requires a list of packages to install")

        inst_deb_pkgs = []
        for pkg in packages:
            pkg_obj = self._cache[pkg]
            pkg_obj.mark_install()  # this will mark all dependencies too

        # find new packages (dependencies) that are marked
        for deb_pkg in self._cache:
            if deb_pkg.marked_install and \
                    deb_pkg.name not in self._deb_pkgs:
                self._deb_pkgs.append(deb_pkg.name)
                inst_deb_pkgs.append(deb_pkg.name)

        new_inst = Instruction(InstructionType.DPKG_INSTALL, inst_deb_pkgs)
        self._inst_list.append(new_inst)

    def remove_packages(self, packages: list):
        """Remove deb packages on board.

        Parameters
        ----------
        packages: list
            A list of deb packages to remove on the board.

        Raises
        ------
        ValueError
            The package list was empty.
        """
        if packages == []:
            raise ValueError("Requires a list of packages to remove")

        new_inst = Instruction(InstructionType.DPKG_REMOVE, packages)
        self._inst_list.append(new_inst)

    def purge_packages(self, packages: list):
        """Purge deb packages on board.

        Parameters
        ----------
        packages: list
            A list of deb packages to remove on the board.

        Raises
        ------
        ValueError
            The package list was empty.
        """
        if packages == []:
            raise ValueError("Requires a list of packages to remove")

        new_inst = Instruction(InstructionType.DPKG_PURGE, packages)
        self._inst_list.append(new_inst)

    def add_bash_scripts(self, bash_scipts: list):
        """Run bash scripts on the board.

        Parameters
        ----------
        bash_scipts: list
            A list of bash script to run on the board.

        Raises
        ------
        ValueError
            The script list was empty.
        """
        if bash_scipts == []:
            raise ValueError("Requires a list of bash scipts to run")

        new_inst = Instruction(InstructionType.BASH_SCRIPT, bash_scipts)
        self._inst_list.append(new_inst)

    def add_support_files(self, support_files: list):
        """Add a support files to update archive.

        Parameters
        ----------
        support_files: list
            A list of support files to add to the update.

        Raises
        ------
        ValueError
            A listed support file does not exist.
        """
        for s_file in support_files:
            # Fix: the original raised when the file DID exist; the error
            # message ("was not found") shows missing files are the error.
            if not isfile(s_file):
                raise ValueError(" {} was not found".format(s_file))

        new_inst = Instruction(InstructionType.SUPPORT_FILE, support_files)
        self._inst_list.append(new_inst)

    def status(self):
        """Print the contexts of instructions list"""
        for i in self._inst_list:
            print(i)

    def make_update_archive(self):
        """Make the update archive"""
        # download deb files
        self._cache.fetch_archives()

        # replace package name with deb filepath in instruction obj
        for inst in self._inst_list:
            if not inst.type == InstructionType.DPKG_INSTALL:
                continue
            for i in range(len(inst.items)):
                found = False
                for deb_file in listdir(DOWNLOAD_DIR):
                    if not deb_file.endswith(".deb"):
                        continue
                    if deb_file.startswith(inst.items[i] + "_"):
                        inst.items[i] = DOWNLOAD_DIR + deb_file
                        found = True
                        break
                # Fix: bail out when a deb was NOT downloaded; the original
                # broke after the first successful match, leaving the rest
                # of the packages unresolved.
                if found is False:
                    break

        print("Making tar")
        update_file = create_update_file(self._board, self._inst_list, "./")
        print("{} was made".format(update_file))
class IsolatedAptCache(object):
    """A apt.cache.Cache wrapper that isolates it from the system it runs on.

    :ivar cache: the isolated cache.
    :type cache: apt.cache.Cache
    """

    def __init__(self, sources, architecture=None, prefer_label=None):
        """Create an IsolatedAptCache.

        :param sources: a list of sources such that they can be prefixed
            with "deb " and fed to apt.
        :type sources: an iterable of str
        :param architecture: the architecture to fetch packages for.
        :type architecture: str
        """
        self.sources = sources
        self.architecture = architecture
        self.tempdir = None
        self.prefer_label = prefer_label

    def prepare(self):
        """Prepare the IsolatedAptCache for use.

        Should be called before use, and after any modification to the
        list of sources.
        """
        self.cleanup()
        logger.debug("Writing apt configs")
        self.tempdir = tempfile.mkdtemp(prefix="hwpack-apt-cache-")
        dirs = ["var/lib/dpkg",
                "etc/apt/sources.list.d",
                "var/cache/apt/archives/partial",
                "var/lib/apt/lists/partial",
                ]
        for d in dirs:
            os.makedirs(os.path.join(self.tempdir, d))
        self.set_installed_packages([], reopen=False)
        sources_list = os.path.join(
            self.tempdir, "etc", "apt", "sources.list")
        with open(sources_list, 'w') as f:
            for source in self.sources:
                # To make a file URL look like an HTTP one (for urlparse)
                # We do this to use urlparse, which is probably more robust
                # than any regexp we come up with.
                mangled_source = source
                if re.search("file:/[^/]", source):
                    mangled_source = re.sub("file:/", "file://", source)
                url_parsed = urlparse.urlsplit(mangled_source)
                # If the source uses authentication, don't put in
                # sources.list; write the credentials to auth.conf instead.
                if url_parsed.password:
                    # NOTE(review): placing hostname in the netloc slot
                    # drops any explicit port — confirm sources never
                    # carry one.
                    url_parts_without_user_pass = [url_parsed.scheme,
                                                   url_parsed.hostname,
                                                   url_parsed.path,
                                                   url_parsed.query,
                                                   url_parsed.fragment]
                    auth_name = os.path.join(
                        self.tempdir, "etc", "apt", "auth.conf")
                    with open(auth_name, 'w') as auth:
                        auth.write(
                            "machine " + url_parsed.hostname + "\n" +
                            "login " + url_parsed.username + "\n" +
                            "password " + url_parsed.password + "\n")
                    source = urlparse.urlunsplit(url_parts_without_user_pass)
                    # Get rid of extra / in file URLs
                    source = re.sub("file://", "file:/", source)
                f.write("deb %s\n" % source)
        if self.architecture is not None:
            apt_conf = os.path.join(self.tempdir, "etc", "apt", "apt.conf")
            with open(apt_conf, 'w') as f:
                f.write(
                    'Apt {\nArchitecture "%s";\n'
                    'Install-Recommends "true";\n}\n' % self.architecture)
        if self.prefer_label is not None:
            apt_preferences = os.path.join(
                self.tempdir, "etc", "apt", "preferences")
            with open(apt_preferences, 'w') as f:
                f.write(
                    'Package: *\n'
                    'Pin: release l=%s\n'
                    'Pin-Priority: 1001\n' % self.prefer_label)
        # XXX: This is a temporary workaround for bug 885895.
        apt_pkg.config.set("Dir::bin::dpkg", "/bin/false")
        self.cache = Cache(rootdir=self.tempdir, memonly=True)
        logger.debug("Updating apt cache")
        try:
            self.cache.update()
        # Fix: `except FetchFailedException, e` is Python-2-only syntax;
        # the `as` form is valid on Python 2.6+ and 3.
        except FetchFailedException as e:
            # Hide any credentials embedded in https URLs from the error.
            obfuscated_e = re.sub(r"([^ ]https://).+?(@)", r"\1***\2", str(e))
            raise FetchFailedException(obfuscated_e)
        self.cache.open()
        return self
class Transition(dbus.service.Object):
    """D-Bus service object that guards apt package operations behind
    PolicyKit authorization and the system package-manager locks."""

    def __init__(self, conn=None, object_path=None, bus_name=None):
        super().__init__(conn, object_path, bus_name)
        self.dbus_info = None
        self.polkit = None
        self.enforce_polkit = True
        self.cache = Cache()
        self.lock = None      # fd for /var/lib/dpkg/lock-frontend
        self.apt_lock = None  # fd for /var/lib/apt/lists/lock

    @dbus.service.method(
        'org.pop_os.transition_system.Interface',
        in_signature='', out_signature='b',
        sender_keyword='sender', connection_keyword='conn'
    )
    def obtain_lock(self, sender=None, conn=None):
        """ Lock the package system. """
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        print('Obtaining Package manager lock')
        try:
            self.lock = apt_pkg.get_lock('/var/lib/dpkg/lock-frontend', True)
            self.apt_lock = apt_pkg.get_lock('/var/lib/apt/lists/lock', True)
            print('Lock obtained')
            return True
        except apt_pkg.Error:
            print('Could not obtain lock')
            self.lock = None
            self.apt_lock = None
            return False

    @dbus.service.method(
        'org.pop_os.transition_system.Interface',
        in_signature='', out_signature='b',
        sender_keyword='sender', connection_keyword='conn'
    )
    def release_lock(self, sender=None, conn=None):
        """ Unlock the package system. """
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        print('Releasing package manager lock')
        try:
            os.close(self.lock)
            os.close(self.apt_lock)
            self.lock = None
            self.apt_lock = None
            print('Lock released')
            return True
        # Fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt.  os.close raises OSError on a bad fd and
        # TypeError when the lock was never obtained (still None).
        except (OSError, TypeError):
            print('Could not release lock')
            return False

    @dbus.service.method(
        'org.pop_os.transition_system.Interface',
        in_signature='', out_signature='b',
        sender_keyword='sender', connection_keyword='conn'
    )
    def open_cache(self, sender=None, conn=None):
        """ Open the package cache. """
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        if self.lock and self.apt_lock:
            print('Opening package cache')
            self.cache.update()
            self.cache.open()
            print('Cache open')
            return True
        print('No lock, cannot open cache')
        return False

    @dbus.service.method(
        'org.pop_os.transition_system.Interface',
        in_signature='', out_signature='b',
        sender_keyword='sender', connection_keyword='conn'
    )
    def commit_changes(self, sender=None, conn=None):
        """ Commit changes to the cache. """
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        if self.lock and self.apt_lock:
            self.cache.commit()
            print('Committed changes to cache')
            return True
        print('No lock, Cannot commit changes')
        return False

    @dbus.service.method(
        'org.pop_os.transition_system.Interface',
        in_signature='', out_signature='b',
        sender_keyword='sender', connection_keyword='conn'
    )
    def close_cache(self, sender=None, conn=None):
        """ Close the package cache. """
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        if self.lock and self.apt_lock:
            self.cache.close()
            print('Package cache closed')
            return True
        print('No lock, cannot close cache')
        return False

    @dbus.service.method(
        'org.pop_os.transition_system.Interface',
        in_signature='s', out_signature='s',
        sender_keyword='sender', connection_keyword='conn'
    )
    def remove_package(self, package, sender=None, conn=None):
        """ Mark a package for removal."""
        self._check_polkit_privilege(
            sender, conn, 'org.pop_os.transition_system.removedebs'
        )
        if self.lock and self.apt_lock:
            print(f'Marking {package} for removal')
            try:
                pkg = self.cache[package]
                pkg.mark_delete()
                return pkg.name
            # Fix: narrowed from a bare `except:` — KeyError for an
            # unknown package, apt errors from mark_delete.
            except Exception:
                print(f'Could not mark {package} for removal')
                return ''
        print('No lock, cannot mark packages')
        return ''

    @dbus.service.method(
        'org.pop_os.transition_system.Interface',
        in_signature='', out_signature='',
        sender_keyword='sender', connection_keyword='conn'
    )
    def exit(self, sender=None, conn=None):
        # Shut down cleanly: drop the cache and locks, then stop the
        # (module-level) main loop.
        if self.lock and self.apt_lock:
            self.close_cache()
            self.release_lock()
        mainloop.quit()

    def _check_polkit_privilege(self, sender, conn, privilege):
        '''Verify that sender has a given PolicyKit privilege.

        sender is the sender's (private) D-BUS name, such as ":1:42"
        (sender_keyword in @dbus.service.methods). conn is
        the dbus.Connection object (connection_keyword in
        @dbus.service.methods). privilege is the PolicyKit privilege string.

        This method returns if the caller is privileged, and otherwise
        throws a PermissionDeniedByPolicy exception.
        '''
        if sender is None and conn is None:
            # Called locally, not through D-Bus
            return
        if not self.enforce_polkit:
            # For testing
            return
        if self.dbus_info is None:
            self.dbus_info = dbus.Interface(
                conn.get_object('org.freedesktop.DBus',
                                '/org/freedesktop/DBus/Bus', False),
                'org.freedesktop.DBus')
        pid = self.dbus_info.GetConnectionUnixProcessID(sender)
        if self.polkit is None:
            self.polkit = dbus.Interface(dbus.SystemBus().get_object(
                'org.freedesktop.PolicyKit1',
                '/org/freedesktop/PolicyKit1/Authority', False),
                'org.freedesktop.PolicyKit1.Authority'
            )
        try:
            (is_auth, _, details) = self.polkit.CheckAuthorization(
                ('unix-process',
                 {'pid': dbus.UInt32(pid, variant_level=1),
                  'start-time': dbus.UInt64(0, variant_level=1)}),
                privilege, {'': ''}, dbus.UInt32(1), '', timeout=600
            )
        except dbus.DBusException as e:
            if e._dbus_error_name == \
                    'org.freedesktop.DBus.Error.ServiceUnknown':
                # polkitd timed out, connect again
                self.polkit = None
                return self._check_polkit_privilege(sender, conn, privilege)
            else:
                raise
        if not is_auth:
            raise PermissionDeniedByPolicy(privilege)
class IsolatedAptCache(object):
    """A apt.cache.Cache wrapper that isolates it from the system it runs on.

    :ivar cache: the isolated cache.
    :type cache: apt.cache.Cache
    """

    def __init__(self, sources, architecture=None, prefer_label=None):
        """Create an IsolatedAptCache.

        :param sources: a list of sources such that they can be prefixed
            with "deb " and fed to apt.
        :type sources: an iterable of str
        :param architecture: the architecture to fetch packages for.
        :type architecture: str
        """
        self.sources = sources
        self.architecture = architecture
        self.tempdir = None
        self.prefer_label = prefer_label

    def prepare(self):
        """Prepare the IsolatedAptCache for use.

        Should be called before use, and after any modification to the
        list of sources.
        """
        self.cleanup()
        logger.debug("Writing apt configs")
        self.tempdir = tempfile.mkdtemp(prefix="hwpack-apt-cache-")
        dirs = [
            "var/lib/dpkg",
            "etc/apt/sources.list.d",
            "var/cache/apt/archives/partial",
            "var/lib/apt/lists/partial",
        ]
        for d in dirs:
            os.makedirs(os.path.join(self.tempdir, d))
        self.set_installed_packages([], reopen=False)
        sources_list = os.path.join(self.tempdir, "etc", "apt",
                                    "sources.list")
        with open(sources_list, 'w') as f:
            for source in self.sources:
                # To make a file URL look like an HTTP one (for urlparse)
                # We do this to use urlparse, which is probably more robust
                # than any regexp we come up with.
                mangled_source = source
                if re.search("file:/[^/]", source):
                    mangled_source = re.sub("file:/", "file://", source)
                url_parsed = urlparse.urlsplit(mangled_source)
                # If the source uses authentication, don't put in
                # sources.list; write the credentials to auth.conf instead.
                if url_parsed.password:
                    # NOTE(review): placing hostname in the netloc slot
                    # drops any explicit port — confirm sources never
                    # carry one.
                    url_parts_without_user_pass = [
                        url_parsed.scheme, url_parsed.hostname,
                        url_parsed.path, url_parsed.query,
                        url_parsed.fragment
                    ]
                    auth_name = os.path.join(self.tempdir, "etc", "apt",
                                             "auth.conf")
                    with open(auth_name, 'w') as auth:
                        auth.write("machine " + url_parsed.hostname + "\n" +
                                   "login " + url_parsed.username + "\n" +
                                   "password " + url_parsed.password + "\n")
                    source = urlparse.urlunsplit(url_parts_without_user_pass)
                    # Get rid of extra / in file URLs
                    source = re.sub("file://", "file:/", source)
                f.write("deb %s\n" % source)
        if self.architecture is not None:
            apt_conf = os.path.join(self.tempdir, "etc", "apt", "apt.conf")
            with open(apt_conf, 'w') as f:
                f.write('Apt {\nArchitecture "%s";\n'
                        'Install-Recommends "true";\n}\n' % self.architecture)
        if self.prefer_label is not None:
            apt_preferences = os.path.join(self.tempdir, "etc", "apt",
                                           "preferences")
            with open(apt_preferences, 'w') as f:
                f.write('Package: *\n'
                        'Pin: release l=%s\n'
                        'Pin-Priority: 1001\n' % self.prefer_label)
        # XXX: This is a temporary workaround for bug 885895.
        apt_pkg.config.set("Dir::bin::dpkg", "/bin/false")
        self.cache = Cache(rootdir=self.tempdir, memonly=True)
        logger.debug("Updating apt cache")
        try:
            self.cache.update()
        # Fix: `except FetchFailedException, e` is Python-2-only syntax;
        # the `as` form is valid on Python 2.6+ and 3.
        except FetchFailedException as e:
            # Hide any credentials embedded in https URLs from the error.
            obfuscated_e = re.sub(r"([^ ]https://).+?(@)", r"\1***\2", str(e))
            raise FetchFailedException(obfuscated_e)
        self.cache.open()
        return self
# self.apt_status = os.WEXITSTATUS(status)
# self.finished = True
#
# def error(self, pkg, errormsg):
#     """Called when an error happens.
#
#     Emits: status_error()
#     """
#     self.emit(QtCore.SIGNAL("status_error()"))
# def conffile(self, current, new):
#     """Called during conffile.
#
#     Emits: status-conffile()
#     """
#     self.emit("status-conffile")
#
# def start_update(self):
#     """Called when the update starts.
#
#     Emits: status-started()
#     """
#     self.emit("status-started")


if __name__ == '__main__':
    from apt.cache import Cache
    import apt

    # Smoke-test the Qt progress classes against a real apt cache:
    # update the package lists and commit with the Qt progress handlers.
    cache = Cache(QOpProgress())
    cache.update(QAcquireProgress())
    cache.commit(QAcquireProgress(), QInstallProgress())
class UpdateMaker():
    """A class for making updates for OreSat Linux Updater daemon"""

    def __init__(self, board: str):
        """
        Parameters
        ----------
        board: str
            The board to make the update for.

        Raises
        ------
        FileNotFoundError
            No status file for the board exists in the status cache.
        """

        self._board = board  # Fix: was assigned twice in the original.
        self._status_file = ""
        self._cache = Cache(rootdir=ROOT_DIR)
        self._deb_pkgs = []   # names of all debs already queued for install
        self._inst_list = []  # ordered Instruction objects for the update
        self._not_installed_yet_list = []
        self._not_removed_yet_list = []

        print("updating cache")
        self._cache.update(raise_on_error=False)
        self._cache.open()

        # copying the context of the real root apt source.list file into
        # the local one.
        # Fix: test existence BEFORE stat(); the original called stat()
        # first, which raises FileNotFoundError when the file is missing.
        if not isfile(OLU_APT_SOURCES_FILE) or \
                stat(OLU_APT_SOURCES_FILE).st_size == 0:
            copyfile(SYSTEM_APT_SOURCES_FILE, OLU_APT_SOURCES_FILE)

        # adding OreSat Debian apt repo
        with open(OLU_APT_SOURCES_FILE, "a") as f:
            f.write(
                "deb [trusted=yes] https://debian.oresat.org/packages ./")

        # copying the apt repo signatures
        # NOTE(review): only copies when the dir holds exactly 3 entries —
        # presumably a "fresh directory" marker; confirm.
        if len(listdir(OLU_SIGNATURES_DIR)) == 3:
            for root, dirs, files in walk(SYSTEM_SIGNATURES_DIR):
                for file in files:
                    if file != "lock":
                        copyfile(SYSTEM_SIGNATURES_DIR + file,
                                 OLU_SIGNATURES_DIR + file)

        # clear download dir
        for i in listdir(DOWNLOAD_DIR):
            if i.endswith(".deb"):
                remove(DOWNLOAD_DIR + i)

        status_files = []
        for i in listdir(STATUS_CACHE_DIR):
            status_files.append(OLMFile(load=i))
        status_files.sort()

        # find latest olu status tar file
        for i in status_files:
            if i.board == board:
                self._status_file = STATUS_CACHE_DIR + i.name
                break

        if self._status_file == "":
            msg = "No status file for {} board in cache".format(board)
            raise FileNotFoundError(msg)

        # update status file
        dpkg_data = read_dpkg_status_file(self._status_file)
        with open(DPKG_STATUS_FILE, "w") as fptr:
            fptr.write(dpkg_data)

        # dealing with update files that are not installed yet
        olu_status_data = read_olu_status_file(self._status_file)
        for file in literal_eval(olu_status_data):
            with tarfile.open(UPDATE_CACHE_DIR + file, "r") as tar:
                with tar.extractfile("instructions.txt") as instructions:
                    for i in json.loads(instructions.read()):
                        if i["type"] == "DPKG_INSTALL":
                            # Re-mark pending installs so their deps are
                            # resolved against this update too.
                            for pkg in i["items"]:
                                pkg_obj = self._cache[pkg.split('_')[0]]
                                pkg_obj.mark_install()
                            self._not_installed_yet_list.extend(i["items"])
                        elif i["type"] in ("DPKG_REMOVE", "DPKG_PURGE"):
                            self._not_removed_yet_list.extend(i["items"])

    @property
    def not_installed_yet(self) -> list:
        # Package names (deb filename prefix before the first "_") of
        # packages a previous update will install but hasn't yet.
        return [pkg.split('_')[0] for pkg in self._not_installed_yet_list]

    @property
    def not_removed_yet(self) -> list:
        # Package names of packages a previous update will remove/purge
        # but hasn't yet.
        return [pkg.split('_')[0] for pkg in self._not_removed_yet_list]

    def add_packages(self, packages: list, reinstall_not_installed: list,
                     reinstall_not_removed: list):
        """Add deb packages to be installed.

        Parameters
        ----------
        packages: list
            A list of deb packages to install on the board.
        reinstall_not_installed: list
            Pending-install packages to re-queue anyway.
        reinstall_not_removed: list
            Pending-remove packages to re-queue anyway.

        Raises
        ------
        ValueError
            The package list was empty.
        """
        if packages == []:
            raise ValueError("Requires a list of packages to install")

        inst_deb_pkgs = []
        for pkg in packages:
            pkg_obj = self._cache[pkg]
            # checking the not yet installed and removed packages
            if pkg_obj.name in reinstall_not_removed:
                pkg_index = self.not_removed_yet.index(pkg_obj.name)
                self._not_removed_yet_list.pop(pkg_index)
                pkg_obj.mark_install()
            elif pkg_obj.name in reinstall_not_installed:
                pkg_index = self.not_installed_yet.index(pkg_obj.name)
                self._not_installed_yet_list.pop(pkg_index)
                pkg_obj.mark_install()
            elif pkg_obj.name not in self.not_installed_yet and \
                    pkg_obj.name not in self.not_removed_yet:
                pkg_obj.mark_install()

        # find new packages (dependencies) that are marked
        for deb_pkg in self._cache:
            if deb_pkg.marked_install and \
                    deb_pkg.name not in self._deb_pkgs:
                self._deb_pkgs.append(deb_pkg.name)
                inst_deb_pkgs.append(deb_pkg.name)

        new_inst = Instruction(InstructionType.DPKG_INSTALL, inst_deb_pkgs)
        self._inst_list.append(new_inst)

    def remove_packages(self, packages: list):
        """Remove deb packages on board.

        Parameters
        ----------
        packages: list
            A list of deb packages to remove on the board.

        Raises
        ------
        ValueError
            The package list was empty.
        """
        if packages == []:
            raise ValueError("Requires a list of packages to remove")

        new_inst = Instruction(InstructionType.DPKG_REMOVE, packages)
        self._inst_list.append(new_inst)

    def purge_packages(self, packages: list):
        """Purge deb packages on board.

        Parameters
        ----------
        packages: list
            A list of deb packages to remove on the board.

        Raises
        ------
        ValueError
            The package list was empty.
        """
        if packages == []:
            raise ValueError("Requires a list of packages to remove")

        new_inst = Instruction(InstructionType.DPKG_PURGE, packages)
        self._inst_list.append(new_inst)

    def add_bash_scripts(self, bash_scipts: list):
        """Run bash scripts on the board.

        Parameters
        ----------
        bash_scipts: list
            A list of bash script to run on the board.

        Raises
        ------
        ValueError
            The script list was empty.
        """
        if bash_scipts == []:
            raise ValueError("Requires a list of bash scipts to run")

        new_inst = Instruction(InstructionType.BASH_SCRIPT, bash_scipts)
        self._inst_list.append(new_inst)

    def add_support_files(self, support_files: list):
        """Add a support files to update archive.

        Parameters
        ----------
        support_files: list
            A list of support files to add to the update.

        Raises
        ------
        ValueError
            A listed support file does not exist.
        """
        for s_file in support_files:
            # Fix: the original raised when the file DID exist; the error
            # message ("was not found") shows missing files are the error.
            if not isfile(s_file):
                raise ValueError(" {} was not found".format(s_file))

        new_inst = Instruction(InstructionType.SUPPORT_FILE, support_files)
        self._inst_list.append(new_inst)

    def status(self):
        """Print the contexts of instructions list"""
        for i in self._inst_list:
            print(i)

    def make_update_archive(self):
        """Make the update archive"""
        # download deb files
        self._cache.fetch_archives()

        # replace package name with deb filepath in instruction obj
        for inst in self._inst_list:
            if not inst.type == InstructionType.DPKG_INSTALL:
                continue
            for i in range(len(inst.items)):
                found = False
                for deb_file in listdir(DOWNLOAD_DIR):
                    if not deb_file.endswith(".deb"):
                        continue
                    if deb_file.startswith(inst.items[i] + "_"):
                        inst.items[i] = DOWNLOAD_DIR + deb_file
                        found = True
                        break
                # a deb was not downloaded; nothing more to resolve
                if found is False:
                    break

        print("Making tar")
        update_file = create_update_archive(self._board, self._inst_list,
                                            "./")
        print("{} was made".format(update_file))

        # option to move generate updates to the update cache
        command = input("-> Save copy to update cache [Y/n]: ")
        if command == "Y" or command == "y" or command == "yes":
            try:
                copyfile(update_file,
                         UPDATE_CACHE_DIR + basename(update_file))
            # Fix: narrowed from a bare `except:`; copyfile failures
            # surface as OSError (and its subclasses).
            except OSError:
                print("An error occurred saving the copy to update cache")
            else:
                print("{} was added to update cache".format(update_file))