def load_version(pkg_name: str, version: str, arch=''):
    """Load a specific version of a package from the packages lists.

    Args:
        pkg_name (str): name of the package
        version (str): the version you want to load
        arch (str): load a specific arch (optional). Falls back to the
            system arch, then 'all', then the first arch in the index.

    Returns:
        int (1): package not found (or index unreadable/corrupt)
        int (2): package found, but version/arch not found
        Pkg object: package and version found and returned
    """
    index_path = Env.packages_lists('/' + pkg_name + '/index')
    if not os.path.isfile(index_path):
        return 1

    # load and parse the package index file
    # (context manager fixes the leaked handle of the original)
    try:
        with open(index_path, 'r') as f_index:
            index = json.loads(f_index.read())
    except (OSError, ValueError):
        return 1

    # pick the versions list: requested arch first, then the system
    # arch, then 'all', finally the first arch present in the index
    versions = None
    for candidate in (arch, SysArch.sys_arch(), 'all'):
        if candidate in index:
            arch = candidate
            versions = index[candidate]
            break
    if versions is None:
        if not index:
            # empty index: original crashed with IndexError here
            return 2
        arch = next(iter(index))
        versions = index[arch]

    if version not in versions:
        return 2

    # load the matched version's data file
    try:
        ver_path = Env.packages_lists('/' + pkg_name + '/' + version + '-' + arch)
        with open(ver_path, 'r') as f_ver:
            return Pkg(json.loads(f_ver.read()))
    except (OSError, ValueError):
        return 2
def load_from_index(index_json: dict, package_name: str):
    """Load package data from an index file.

    Args:
        index_json (dict): loaded json data from the
            `/var/lib/cati/lists/<pkgname>/index` file
        package_name (str): name of the package

    Returns:
        Pkg: the loaded Pkg object (latest version for the chosen arch)
    """
    # choose the arch: system arch first, then 'all',
    # finally the first arch present in the index
    arch = sys_arch()
    if arch not in index_json:
        arch = 'all' if 'all' in index_json else next(iter(index_json))
    versions = index_json[arch]

    # load the latest version's data file
    ver = Pkg.get_last_version(versions)
    path = Env.packages_lists('/' + package_name + '/' + ver + '-' + arch)
    with open(path, 'r') as f:
        content_json = json.loads(f.read())
    return Pkg(content_json)
def all_list(): """ Returns list of packages Returns: dict: output has two keys: { "list": list[Pkg] // list of packages "errors": list // errors while loading packages } """ errors = [] packages = [] tmp_list = os.listdir(Env.packages_lists()) tmp_list.sort() for item in tmp_list: if os.path.isfile(Env.packages_lists('/' + item + '/index')): f_index = open(Env.packages_lists('/' + item + '/index'), 'r') try: index_content = f_index.read() try: index_json = json.loads(index_content) try: pkg = Pkg.load_from_index(index_json, item) packages.append(pkg) except: errors.append('faild to load package "' + item + '"') except: errors.append('invalid json content in "' + Env.packages_lists('/' + item + '/index') + '"') except: errors.append('cannot read file "' + Env.packages_lists('/' + item + '/index') + '"') else: errors.append( f'package "{item}" has not index file in lists ({Env.packages_lists("/" + item + "/index")} not found)' ) return {'list': packages, 'errors': errors}
def update_indexes(events: dict):
    """Rebuild the index file of every package in the lists directory.

    Scans each package's directory, reads every version file and
    regenerates the `index` file mapping arch -> list of versions.

    Args:
        events (dict): error-handling callbacks:
            - cannot_read_file(path): called when a file cannot be read
            - invalid_json_data(path, content): called when a file's
              json content is corrupt
    """
    require_root_permission()

    for pkg in os.listdir(Env.packages_lists()):
        if not os.path.isdir(Env.packages_lists('/' + pkg)):
            continue
        pkg_index = {}
        for version in os.listdir(Env.packages_lists('/' + pkg)):
            if version == 'index':
                continue
            version_path = Env.packages_lists('/' + pkg + '/' + version)
            if not os.path.isfile(version_path):
                continue

            # read the version file
            try:
                with open(version_path, 'r') as f:
                    content = f.read()
            except OSError:
                events['cannot_read_file'](version_path)
                continue

            # parse it and record the version under its arch;
            # setdefault replaces the original `tmp = d[k]; del tmp`
            # existence probe. A missing 'arch'/'version' key is also
            # reported as invalid data, like the original.
            try:
                content_json = json.loads(content)
                pkg_index.setdefault(content_json['arch'], []).append(
                    content_json['version'])
            except (ValueError, KeyError, TypeError):
                events['invalid_json_data'](version_path, content)

        # write the generated index to the index file
        with open(Env.packages_lists('/' + pkg + '/index'), 'w') as f_index:
            f_index.write(json.dumps(pkg_index))
def run(self): """ Run test """ self.assert_equals( self.run_command('pkg', [ 'install', 'repository/test-repository/simple-test-package.cati' ]), 0) self.assert_true(os.path.isfile(self.env('/usr/bin/cati-testpkga'))) self.assert_equals( self.run_command('pkg', [ 'install', 'repository/test-repository/simple-test-package.cati' ]), 0) self.assert_true(os.path.isfile(self.env('/usr/bin/cati-testpkga'))) pkg = Pkg.load_from_index( json.loads( open(Env.packages_lists('/testpkga/index'), 'r').read()), 'testpkga') self.assert_equals(pkg.installed(), '1.0') self.refresh_env() self.assert_equals( self.run_command('pkg', [ 'install', 'repository/test-repository/testpkg-with-file-conflict-a.cati', ]), 0) self.assert_true( os.path.isfile(self.env() + '/etc/testpkg-with-file-conflict/test.txt')) self.refresh_env() os.mkdir(self.env() + '/app') self.assert_equals( self.run_command('pkg', [ 'install', 'repository/test-repository/testpkg-with-file-conflict-a.cati', '--target=/app' ]), 0) self.assert_true(not os.path.isfile( self.env() + '/etc/testpkg-with-file-conflict/test.txt')) self.assert_true( os.path.isfile(self.env() + '/app/etc/testpkg-with-file-conflict/test.txt'))
def require_root_permission(is_cli=True, die_action=None):
    """Check that the current user has root permission.

    Args:
        is_cli (bool): if True and the user lacks permission, an error
            is printed to the terminal and the process exits. If False,
            `die_action` is called instead. (disabled in the testing
            environment)
        die_action (callable): function to run on permission failure
            when `is_cli` is False (may be None; then nothing is run)
    """
    # if the program is in testing mode don't check permission
    if is_testing:
        return

    # real root never needs the file-access probe
    if os.getuid() == 0:
        return

    # check write and read access for all files the manager touches
    files_to_check = [
        Env.packages_lists(),
        Env.installed_lists(),
        Env.state_file(),
        Env.unremoved_conffiles(),
        Env.security_blacklist(),
        Env.any_scripts(),
        Env.repos_config(),
        Env.repos_config_dir(),
        Env.cache_dir(),
        Env.allowed_archs(),
    ]

    for f in files_to_check:
        if not os.access(f, os.W_OK) or not os.access(f, os.R_OK):
            if is_cli:
                pr.e(ansi.red + sys.argv[0] + ': permission is denied' +
                     ansi.reset)
                pr.exit(1)
            else:
                # bug fix: the original called die_action()
                # unconditionally, raising TypeError when it was None
                if die_action is not None:
                    die_action()
            return
def get_versions_list(self):
    """Return the versions list of the package.

    Returns:
        list: list of versions as [<version>, <arch>] pairs;
            empty list when the index is missing, unreadable or corrupt
    """
    # read and parse the package's index file; the original leaked the
    # file handle when read()/json.loads raised before close()
    try:
        index_path = Env.packages_lists('/' + self.data['name'] + '/index')
        with open(index_path, 'r') as f_index:
            index_json = json.loads(f_index.read())
    except (OSError, ValueError, KeyError):
        return []

    # flatten {arch: [versions]} into [[version, arch], ...]
    try:
        return [[ver, arch]
                for arch in index_json
                for ver in index_json[arch]]
    except TypeError:
        # index content was valid json but not the expected mapping
        return []
def run(self): """ Run command """ RootRequired.require_root_permission() if not self.is_quiet(): pr.p('Loading repositories list...') repos = Repo.get_list() ReposListErrorShower.show(repos) if not self.is_quiet(): pr.p('Prepairing to update repos...') orig_repos = [] for repo in repos: if repo.successful_loaded: if repo.test(): orig_repos.append(repo) else: pr.e(ansi.red + 'Cannot make connection to repo "' + repo.full_string + '"' + ansi.reset) if not self.is_quiet(): pr.p('Updating repositories...') pr.p('=============================') # downloaded repos data files paths downloaded_paths = [] # update repos for repo in list(reversed(orig_repos)): if not self.is_quiet(): pr.p('Fetching ' + repo.name + ' (' + repo.url + ') data...') data = repo.get_data(download_event=self.download_event) if type(data) == int: pr.e(ansi.red + 'Cannot update ' + repo.name + ' (' + repo.url + '): error code ' + str(data) + ansi.reset) elif isinstance(data, Exception): pr.e(ansi.red + 'Cannot update ' + repo.name + ' (' + repo.url + '): ' + str(data) + ansi.reset) else: # validate data try: tmp = json.loads(data) # save data in an file path = Env.cache_dir('/' + repo.name + '-' + str(time.time()) + str(random.random())) + '.json' f = open(path, 'w') f.write(data) f.close() downloaded_paths.append(path) except: pr.e(ansi.red + 'Cannot update ' + repo.name + ' (' + repo.url + '): invalid json data recived' + ansi.reset) if not self.is_quiet(): pr.p('Updating packages list...') # load downloaded data packages = [] for path in downloaded_paths: f = open(path, 'r') data = f.read().strip() f.close() items = json.loads(data) for item in items: if PackageJsonValidator.validate(item): packages.append(item) else: pass for pkg in packages: if PackageJsonValidator.validate(pkg): if self.is_verbose(): pr.p('adding ' + pkg['name'] + ':' + pkg['version'] + ':' + pkg['arch'] + '...') # write package on list if not os.path.isdir(Env.packages_lists('/' + pkg['name'])): 
os.mkdir(Env.packages_lists('/' + pkg['name'])) try: f = open( Env.packages_lists('/' + pkg['name'] + '/' + pkg['version'] + '-' + pkg['arch']), 'w') f.write(json.dumps(pkg)) f.close() except: pr.e(ansi.red + 'error while adding ' + pkg['name'] + ':' + pkg['version'] + ':' + pkg['arch'] + ansi.reset) else: if self.is_verbose(): pr.p(ansi.yellow + 'invalid json data in an item. ignored...' + ansi.reset) if self.is_quiet(): pr.p('Finishing update...') ListUpdater.update_indexes({ 'cannot_read_file': self.empty_method, 'invalid_json_data': self.empty_method, }) pr.p(ansi.green + 'Done.' + ansi.reset)
def run(self): """ Run command """ RootRequired.require_root_permission() pr.p('Loading packages list...') pr.p('========================') loaded_packages = [] for argument in self.arguments: arg_parts = argument.split('=') if len(arg_parts) == 1: # load last version as default pkg = Pkg.load_last(argument) else: # load specify version pkg = Pkg.load_version(arg_parts[0], arg_parts[1]) if pkg == 1: pkg = False elif pkg == 2: self.message('package "' + arg_parts[0] + '" has not version "' + arg_parts[1] + '"' + ansi.reset, before=ansi.red) continue else: pkg.only_specify_version = True if pkg: try: pkg.only_specify_version except: pkg.only_specify_version = False if pkg.installed(): if not pkg.only_specify_version: self.message( 'package "' + argument + '" is installed. cannot forget installed packages' + ansi.reset, before=ansi.red) continue else: if pkg.installed() == pkg.data['version']: self.message( 'package ' + argument + ' (' + pkg.data['version'] + ') is installed. cannot forget installed packages' + ansi.reset, before=ansi.red) continue loaded_packages.append(pkg) else: self.message('unknow package "' + argument + '"' + ansi.reset, before=ansi.red) if not loaded_packages: return 1 # forget loaded packages for pkg in loaded_packages: if not pkg.only_specify_version: # forget all of package versions shutil.rmtree(Env.packages_lists('/' + pkg.data['name'])) pr.p('Package ' + pkg.data['name'] + ' was forgoten successfully') else: files = glob.glob( Env.packages_lists('/' + pkg.data['name'] + '/' + pkg.data['version'] + '-*')) for f in files: if not '-' in f[len( Env.packages_lists('/' + pkg.data['name'] + '/' + pkg.data['version'] + '-')):]: os.remove(f) pr.p('Version ' + pkg.data['version'] + ' of package ' + pkg.data['name'] + ' was forgoten successfully') try: if len(os.listdir( Env.packages_lists('/' + pkg.data['name']))) <= 1: shutil.rmtree(Env.packages_lists('/' + pkg.data['name'])) except: pass ListUpdater.update_indexes({ 'cannot_read_file': 
self.empty_method_for_event, 'invalid_json_data': self.empty_method_for_event, })
def install(self, pkg: BaseArchive, index_updater_events: dict, installer_events: dict, is_manual=True, run_scripts=True, target_path='', keep_conffiles=False, check_security_blacklist=True):
    """ Install .cati package

    Args:
        pkg (BaseArchive): the package archive object
        index_updater_events (dict): events will be passed to
            `dotcati.ListUpdater.update_indexes()`
        installer_events (dict): The events:
            - package_currently_installed: called with the package and
              the currently installed version when re-installing
            - package_new_installs: called with the package archive on
              a fresh install
            - package_installed: called after package installation
            - dep_and_conflict_error: called on a dependency or
              conflict error
            - arch_error: called when the package arch is not allowed
              on this system
            - directory_not_empty: passed to `self.copy_files()`
        is_manual (bool): mark package as manually installed
            (default is True means manual)
        run_scripts (bool): run package install scripts or not
            (default is True)
        target_path (str): target root path for installed files
            (will pass to `self.copy_files()`)
        keep_conffiles (bool): keep config files if changed
        check_security_blacklist (bool): check whether the package is
            in the security blacklist before installing
    """
    self.conffiles = pkg.get_conffiles()
    self.pkg = pkg
    self.keep_conffiles = keep_conffiles
    # conffiles that were deliberately not copied (filled by copy_files)
    self.uncopied_conffiles = {}

    # check package is in security blacklist
    if check_security_blacklist:
        self.check_security_blacklist(pkg)

    # check package architecture; hand off to the event on mismatch
    if not pkg.data['arch'] in SysArch.allowed_archs():
        return installer_events['arch_error'](pkg)

    # check package dependencies and conflicts; both error types are
    # routed to the same event callback
    try:
        self.check_dep_and_conf(pkg)
    except DependencyError as ex:
        return installer_events['dep_and_conflict_error'](pkg, ex)
    except ConflictError as ex:
        return installer_events['dep_and_conflict_error'](pkg, ex)

    # load the old conffiles list ("<hash>@<path>" per line); missing
    # or unreadable file simply means there are none (best-effort)
    self.old_conffiles = []
    try:
        f = open(Env.installed_lists('/' + pkg.data['name'] + '/conffiles'), 'r')
        content = f.read()
        f.close()
        tmp = content.strip().split('\n')
        self.old_conffiles = [item.strip().split('@') for item in tmp]
    except:
        pass

    # add package data to lists
    if not os.path.isdir(Env.packages_lists('/' + pkg.data['name'])):
        os.mkdir(Env.packages_lists('/' + pkg.data['name']))
    lists_path = Env.packages_lists('/' + pkg.data['name'] + '/' + pkg.data['version'] + '-' + pkg.data['arch'])

    # preserve repo/file metadata from an existing lists entry so a
    # local re-install does not lose where the package came from;
    # each field falls back independently when missing
    try:
        lists_f = open(lists_path, 'r')
        old_repo = json.loads(lists_f.read())['repo']
        lists_f.close()
    except:
        old_repo = 'Local'
    try:
        lists_f = open(lists_path, 'r')
        old_file_path = json.loads(lists_f.read())['file_path']
        lists_f.close()
    except:
        old_file_path = False
    try:
        lists_f = open(lists_path, 'r')
        old_file_sha256 = json.loads(lists_f.read())['file_sha256']
        lists_f.close()
    except:
        old_file_sha256 = False
    try:
        lists_f = open(lists_path, 'r')
        old_file_md5 = json.loads(lists_f.read())['file_md5']
        lists_f.close()
    except:
        old_file_md5 = False

    # rewrite the lists entry with the preserved metadata and the
    # archive's file list (members under 'files/' mapped to '/...')
    lists_f = open(lists_path, 'w')
    pkg.data['repo'] = old_repo
    if old_file_path != False:
        pkg.data['file_path'] = old_file_path
    tmp_pkg_data = pkg.data
    if old_file_md5:
        tmp_pkg_data['file_md5'] = old_file_md5
    if old_file_sha256:
        tmp_pkg_data['file_sha256'] = old_file_sha256
    tmp_pkg_data['files'] = ['/' + member[6:] for member in pkg.members() if member[:6] == 'files/']
    lists_f.write(json.dumps(tmp_pkg_data))
    lists_f.close()
    ListUpdater.update_indexes(index_updater_events)

    # extract package in a temp place (make_dir then rmdir so
    # extractall can create the directory itself)
    temp_dir = Temp.make_dir()
    os.rmdir(temp_dir)
    try:
        pkg.extractall(temp_dir)
    except IsADirectoryError:
        pass
    self.extracted_package_dir = temp_dir

    # install package: report re-install vs fresh install
    if Pkg.is_installed(pkg.data['name']):
        installer_events['package_currently_installed'](pkg, Pkg.installed_version(pkg.data['name']))
    else:
        installer_events['package_new_installs'](pkg)

    if run_scripts:
        self.run_script('ins-before')

    # copy package files into the target root; returns entries of the
    # form '<type>:<path>' (type 'cf' marks conffiles)
    copied_files = self.copy_files(pkg, installer_events['directory_not_empty'], target_path)

    # set install configuration
    if not os.path.isdir(Env.installed_lists('/' + pkg.data['name'])):
        os.mkdir(Env.installed_lists('/' + pkg.data['name']))
    f_ver = open(Env.installed_lists('/' + pkg.data['name'] + '/ver'), 'w')
    f_ver.write(pkg.data['version'])  # write installed version
    f_ver.close()

    # write copied files list
    f_files = open(Env.installed_lists('/' + pkg.data['name'] + '/files'), 'w')
    copied_files_str = ''
    for copied_file in copied_files:
        copied_files_str += copied_file + '\n'
    f_files.write(copied_files_str.strip())  # write copied files
    f_files.close()

    # write conffiles list as '<sha256>@<path>' lines; the hash of an
    # uncopied (kept) conffile comes from self.uncopied_conffiles,
    # otherwise it is computed from the file on disk
    f_conffiles = open(Env.installed_lists('/' + pkg.data['name'] + '/conffiles'), 'w')
    copied_conffiles_str = ''
    for copied_conffile in copied_files:
        if copied_conffile.split(':')[0] == 'cf':
            try:
                conffile_hash = self.uncopied_conffiles[copied_conffile.split(':', 1)[-1]]
            except:
                conffile_hash = calc_file_sha256(Env.base_path(copied_conffile.split(':', 1)[-1]))
            copied_conffiles_str += conffile_hash + '@' + copied_conffile.split(':', 1)[-1] + '\n'
    f_conffiles.write(copied_conffiles_str.strip())  # write copied conffiles
    f_conffiles.close()

    # copy the `any` script, if the package ships one
    if os.path.isfile(self.extracted_package_dir + '/scripts/any'):
        os.system('cp "' + self.extracted_package_dir + '/scripts/any' + '" "' + Env.any_scripts('/' + pkg.data['name']) + '"')

    # save static files list as '<sha256>@<path>' lines, only for
    # copied files that the package declares static and that exist
    static_files_list = pkg.get_static_files()
    f_static_files = open(Env.installed_lists('/' + pkg.data['name'] + '/staticfiles'), 'w')
    static_files_str = ''
    for copied_file in copied_files:
        copied_file_path = copied_file.split(':', 1)[1]
        if copied_file_path in static_files_list:
            if os.path.isfile(Env.base_path('/' + copied_file_path)):
                # calculate file sha256 sum
                copied_file_sha256 = calc_file_sha256(Env.base_path('/' + copied_file_path))
                # add file to list
                static_files_str += copied_file_sha256 + '@' + copied_file_path + '\n'
    f_static_files.write(static_files_str.strip())  # write copied files
    f_static_files.close()

    # record installation timestamp
    f_installed_at = open(Env.installed_lists('/' + pkg.data['name'] + '/installed_at'), 'w')
    f_installed_at.write(str(time.time()))  # write time (installed at)
    f_installed_at.close()

    # an empty 'manual' marker file flags a manually installed package
    if is_manual:
        f_manual = open(Env.installed_lists('/' + pkg.data['name'] + '/manual'), 'w')
        f_manual.write('')
        f_manual.close()

    if run_scripts:
        self.run_script('ins-after')

    # copy remove scripts for later use by the uninstaller
    if os.path.isfile(self.extracted_package_dir + '/scripts/rm-before'):
        os.system(
            'cp "' + self.extracted_package_dir + '/scripts/rm-before' + '" "' + Env.installed_lists('/' + pkg.data['name'] + '/rm-before') + '"'
        )
    if os.path.isfile(self.extracted_package_dir + '/scripts/rm-after'):
        os.system(
            'cp "' + self.extracted_package_dir + '/scripts/rm-after' + '" "' + Env.installed_lists('/' + pkg.data['name'] + '/rm-after') + '"'
        )

    # pop package from state
    BaseTransaction.pop_state()

    # call package installed event
    installer_events['package_installed'](pkg)