def copy_once_file(self, paths):
    """ Copy one of the package files (this method is called from the `copy_files` method) """
    if os.path.isfile(paths[1]):
        if paths[0] in self.conffiles:
            self.copied_files.append('cf:' + paths[0])
            old_conffiles = [item[-1] for item in self.old_conffiles]
            if paths[0] in old_conffiles:
                f_hash = calc_file_sha256(paths[1])
                if [f_hash, paths[0]] in self.old_conffiles:
                    self.uncopied_conffiles[paths[0]] = f_hash
                    return
                else:
                    if self.keep_conffiles:
                        self.uncopied_conffiles[paths[0]] = f_hash
                        return
        else:
            self.copied_files.append('f:' + paths[0])
        os.system('cp "' + paths[1] + '" "' + Env.base_path(paths[0]) + '"')
    else:
        os.mkdir(Env.base_path(paths[0]))
        # the conffiles list holds target paths, so check the target path here as well
        if paths[0] in self.conffiles:
            self.copied_files.append('cd:' + paths[0])
        else:
            self.copied_files.append('d:' + paths[0])
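# A short legend (derived from `copy_once_file` and `copy_files`) for the entry format used
# in `self.copied_files` and in the installed `files` database list: each entry is
# '<type>:<path>', where the type prefix is one of:
#   f:  regular file        cf: config file (conffile)
#   d:  directory           cd: config directory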
def run(self):
    """ Run command """
    if self.has_option('--edit') or self.has_option('-e'):
        return os.system('vim "' + Env.repos_config() + '"')

    if self.has_option('--add') or self.has_option('-a'):
        RootRequired.require_root_permission()
        repo_string = ''
        for arg in self.arguments:
            repo_string += arg + ' '
        repo_string = repo_string.strip()
        tmp_repo = Repo(repo_string)
        tmp_repo.loaded_from_file = 'argument'
        tmp_repo.line_number = 0
        if not tmp_repo.successful_loaded:
            ReposListErrorShower.show([tmp_repo])
            return 1
        # write repo
        path = Env.repos_config_dir('/' + tmp_repo.name + '-' + tmp_repo.get_pkg_str() + '-' + tmp_repo.get_arch_str())
        tmp = ''
        tmp_i = 1
        # find a free file name by appending a numeric suffix
        while os.path.isfile(path + tmp):
            tmp = '-' + str(tmp_i)
            tmp_i += 1
        f = open(path + tmp, 'w')
        f.write('# added manually\n' + repo_string)
        f.close()
        return 0

    if self.has_option('--scan'):
        for arg in self.arguments:
            if os.path.isdir(arg):
                Scanner.scan(arg)
                pr.p(ansi.green + 'directory ' + arg + ' scanned successfully' + ansi.reset)
            else:
                self.message('directory ' + arg + ' not found', is_error=True)
        return 0

    # show list of repos
    if not self.is_quiet():
        pr.p('Loading repositories list...')
    repos = Repo.get_list()
    if not self.is_quiet():
        pr.p('============================')
    ReposListErrorShower.show(repos)
    for repo in repos:
        if repo.successful_loaded:
            is_disable = ''
            if repo.is_disable:
                is_disable = ' (Disabled)'
            pr.p(repo.name + ': ' + repo.url + ' pkg=' + repo.get_pkg_str() + ' arch=' + repo.get_arch_str()
                 + ' channel=' + repo.get_channel_str() + is_disable)
def add_package_to_lists(self, pkg: Pkg, index_updater_events: dict):
    """
    Adds the package information to the database

    Args:
        pkg (Pkg): the package
        index_updater_events (dict): events for the index updater
    """
    if not os.path.isdir(Env.packages_lists('/' + pkg.data['name'])):
        os.mkdir(Env.packages_lists('/' + pkg.data['name']))

    lists_path = Env.packages_lists('/' + pkg.data['name'] + '/' + pkg.data['version'] + '-' + pkg.data['arch'])

    try:
        lists_f = open(lists_path, 'r')
        old_repo = json.loads(lists_f.read())['repo']
        lists_f.close()
    except:
        old_repo = 'Local'

    try:
        lists_f = open(lists_path, 'r')
        old_file_path = json.loads(lists_f.read())['file_path']
        lists_f.close()
    except:
        old_file_path = False

    try:
        lists_f = open(lists_path, 'r')
        old_file_sha256 = json.loads(lists_f.read())['file_sha256']
        lists_f.close()
    except:
        old_file_sha256 = False

    try:
        lists_f = open(lists_path, 'r')
        old_file_md5 = json.loads(lists_f.read())['file_md5']
        lists_f.close()
    except:
        old_file_md5 = False

    lists_f = open(lists_path, 'w')
    pkg.data['repo'] = old_repo
    if old_file_path != False:
        pkg.data['file_path'] = old_file_path
    tmp_pkg_data = pkg.data
    if old_file_md5:
        tmp_pkg_data['file_md5'] = old_file_md5
    if old_file_sha256:
        tmp_pkg_data['file_sha256'] = old_file_sha256
    tmp_pkg_data['files'] = [
        '/' + member[6:] for member in pkg.members() if member[:6] == 'files/'
    ]
    lists_f.write(json.dumps(tmp_pkg_data))
    lists_f.close()

    ListUpdater.update_indexes(index_updater_events)
    ListUpdater.index_reverse_depends_and_conflicts(pkg)
def pop_state():
    """ Removes the first item from the state file """
    f = open(Env.state_file(), 'r')
    content = f.read()
    f.close()

    content = content.strip()
    lines = content.split('\n')
    if lines:
        lines.pop(0)

    new_content = ''
    for line in lines:
        new_content += line + '\n'

    f = open(Env.state_file(), 'w')
    f.write(new_content)
    f.close()
def state_list():
    """ Returns the list of undone transactions from the state file """
    f = open(Env.state_file(), 'r')
    content = f.read()
    f.close()

    content = content.strip().split('\n')
    content = [line.strip() for line in content]

    result = []
    for item in content:
        if item != '':
            tmp = {}
            parts = item.split('%')
            tmp['action'] = parts[0]
            tmp['pkg'] = parts[1]
            try:
                tmp['version'] = parts[2]
            except:
                tmp['version'] = None
            try:
                tmp['arch'] = parts[3]
            except:
                tmp['arch'] = None
            try:
                tmp['file'] = parts[4]
            except:
                tmp['file'] = None
            result.append(tmp)

    return result
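# A minimal illustration of the state file format parsed above: each line is
# percent-separated as action%pkg%version%arch[%file]. For example, the test suite writes
# the line 'install%testpackage1%1.0%amd64'; parsing it the same way state_list() does:
#
#   parts = 'install%testpackage1%1.0%amd64'.split('%')
#   # -> ['install', 'testpackage1', '1.0', 'amd64']
#   # resulting dict:
#   # {'action': 'install', 'pkg': 'testpackage1', 'version': '1.0', 'arch': 'amd64', 'file': None}
#
# Missing trailing fields (version/arch/file) fall back to None.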
def set_manual_installs(self, packages):
    """ Sets the installed packages type (manual/auto) """
    for pkg in packages:
        if not pkg.is_manual:
            path = Env.installed_lists('/' + pkg.data['name'] + '/manual')
            if os.path.isfile(path):
                os.remove(path)
def get_list() -> list:
    """
    Returns the list of repositories

    Returns:
        list[Repo]: list of loaded repositories
    """
    repos = []
    files = [Env.repos_config()]
    for item in os.listdir(Env.repos_config_dir()):
        if os.path.isfile(Env.repos_config_dir('/' + item)):
            files.append(Env.repos_config_dir('/' + item))

    for fl in files:
        f = open(fl, 'r')
        repos_content = f.read()
        f.close()

        lines = repos_content.split('\n')
        line_counter = 1
        for line in lines:
            line = line.split('#')[0].strip()
            if line != '':
                repo = Repo(line)
                repo.line_number = line_counter
                repo.loaded_from_file = fl
                repos.append(repo)
            line_counter += 1

    # sort by priority: repeatedly pick repos whose priority is not greater than any other,
    # so repos with a smaller numeric priority come first
    sorted_repos = []
    while len(repos):
        i = 0
        while i < len(repos):
            is_less_than_all = True
            j = 0
            while j < len(repos):
                if int(repos[i].priority) > int(repos[j].priority):
                    is_less_than_all = False
                j += 1
            if is_less_than_all:
                sorted_repos.append(repos[i])
                repos.pop(i)
            i += 1
    return sorted_repos
def run_any_scripts(runed_transactions: list, events: dict):
    """
    Runs all of the `any` scripts.

    events:
    - start_run_script: will be run when starting to run each script (gets the script/package name)
    """
    runed_transactions_str = ''
    for rt in runed_transactions[1]:
        runed_transactions_str += rt + ' '
    runed_transactions_str = runed_transactions_str.strip()

    scripts = os.listdir(Env.any_scripts())
    for script in scripts:
        events['start_run_script'](script)
        # run script
        os.system('chmod +x "' + Env.any_scripts('/' + script) + '"')
        os.system(Env.any_scripts('/' + script) + ' ' + runed_transactions[0] + ' ' + runed_transactions_str)
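# Illustration of how each `any` script ends up being invoked (based on the call in
# `sub_install`, which passes ['install', [...'name@version'...]]); the script path below is
# only a placeholder for whatever Env.any_scripts() resolves to:
#
#   <any-scripts-dir>/<script> install testpkga@1.0 testpkgb@2.0
#
# i.e. argument 1 is the transaction type and the remaining arguments are the
# `name@version` strings of the packages in that transaction.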
def add_to_unremoved_conffiles(pkg: Pkg, filepath: str):
    """ Adds filepath to the list of unremoved conffiles """
    f = open(Env.unremoved_conffiles(), 'r')
    filelist = f.read().strip().split('\n')
    f.close()

    # add item to list
    if not filepath in filelist:
        filelist.append(filepath)

    # generate new content of the unremoved_conffiles file
    new_content = ''
    for item in filelist:
        new_content += item + '\n'

    # write new content to file
    f = open(Env.unremoved_conffiles(), 'w')
    f.write(new_content)
    f.close()
def update_indexes(events: dict):
    """
    Loads the available versions of each package and indexes them in the package's index
    file. This is done for all of the packages in the lists.

    Args:
        events (dict): the `events` argument should be a dictionary of functions.
            It is used to handle errors: if something goes wrong, the specific function
            in events will be run.
            events:
            - cannot_read_file: if an error happens while reading a file during this
              process, this will be run with the file path as argument
            - invalid_json_data: if the json content of a file is corrupt, this will be
              run with the file path and content as arguments
    """
    require_root_permission()
    for pkg in os.listdir(Env.packages_lists()):
        pkg_index = {}
        if os.path.isdir(Env.packages_lists('/' + pkg)):
            for version in os.listdir(Env.packages_lists('/' + pkg)):
                if version not in ['index', 'reverse_depends', 'reverse_conflicts']:
                    if os.path.isfile(Env.packages_lists('/' + pkg + '/' + version)):
                        content = None
                        try:
                            f = open(Env.packages_lists('/' + pkg + '/' + version), 'r')
                            content = f.read()
                        except:
                            events['cannot_read_file'](Env.packages_lists('/' + pkg + '/' + version))
                        if content != None:
                            try:
                                content_json = json.loads(content)
                                try:
                                    tmp = pkg_index[content_json['arch']]
                                    del tmp
                                except:
                                    pkg_index[content_json['arch']] = []
                                pkg_index[content_json['arch']].append(content_json['version'])
                            except:
                                events['invalid_json_data'](
                                    Env.packages_lists('/' + pkg + '/' + version), content)
            # write generated index to the index file
            f_index = open(Env.packages_lists('/' + pkg + '/index'), 'w')
            f_index.write(json.dumps(pkg_index))
            f_index.close()
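# Sketch of the generated index file content: `pkg_index` maps each architecture to the
# list of known versions, so a package available as 1.0 and 2.0 for amd64 would get an
# index file like (example values):
#
#   {"amd64": ["1.0", "2.0"]}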
def run(self):
    """ Run test """
    self.assert_equals(
        self.run_command('pkg', ['install', 'repository/test-repository/testpkgc-2.0.cati']), 0)

    state_f = open(Env.state_file(), 'w')
    state_f.write('install%testpackage1%1.0%amd64\nremove%anotherpackage')
    state_f.close()

    self.assert_equals(self.run_command('remove', ['testpkgc', '-y']), 1)
    self.assert_equals(
        self.run_command('pkg', ['install', 'repository/test-repository/testpkgc-2.0.cati']), 1)

    # tests for cli `state` command
    self.assert_equals(self.run_command('state'), 1)
    self.assert_equals(self.run_command('state', ['--abort', '-y']), 0)
    self.assert_equals(self.run_command('state'), 0)

    self.refresh_env()

    self.assert_equals(
        self.run_command('pkg', ['install', 'repository/test-repository/testpkgc-2.0.cati']), 0)
    self.assert_true(Pkg.is_installed('testpkgc'))

    state_f = open(Env.state_file(), 'w')
    state_f.write('remove%testpkgc')
    state_f.close()

    self.assert_equals(self.run_command('state'), 1)
    self.assert_equals(self.run_command('state', ['--complete']), 0)
    self.assert_equals(self.run_command('state'), 0)
    self.assert_true(not Pkg.is_installed('testpkgc'))
def add_to_state(calc: Calculator):
    """ Adds a new item to the state """
    content = ''
    for item in calc.get_sorted_list():
        content += (item['action'] + '%' + item['pkg'].data['name'] + '%'
                    + item['pkg'].data['version'] + '%' + item['pkg'].data['arch'] + '\n')

    f = open(Env.state_file(), 'w')
    f.write(content)
    f.close()
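# Note: entries written here have four percent-separated fields (action%name%version%arch),
# while `sub_install` appends a fifth field with the package file path; `state_list()`
# tolerates both forms by falling back to None for missing fields.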
def run(self):
    """ Run test """
    self.assert_equals(
        self.run_command('pkg', ['install', 'repository/test-repository/simple-test-package.cati']), 0)
    self.assert_true(os.path.isfile(self.env('/usr/bin/cati-testpkga')))
    self.assert_equals(
        self.run_command('pkg', ['install', 'repository/test-repository/simple-test-package.cati']), 0)
    self.assert_true(os.path.isfile(self.env('/usr/bin/cati-testpkga')))

    pkg = Pkg.load_from_index(
        json.loads(open(Env.packages_lists('/testpkga/index'), 'r').read()), 'testpkga')
    self.assert_equals(pkg.installed(), '1.0')

    self.refresh_env()

    self.assert_equals(
        self.run_command('pkg', [
            'install',
            'repository/test-repository/testpkg-with-file-conflict-a.cati',
        ]), 0)
    self.assert_true(os.path.isfile(self.env() + '/etc/testpkg-with-file-conflict/test.txt'))

    self.refresh_env()

    os.mkdir(self.env() + '/app')
    self.assert_equals(
        self.run_command('pkg', [
            'install',
            'repository/test-repository/testpkg-with-file-conflict-a.cati',
            '--target=/app'
        ]), 0)
    self.assert_true(not os.path.isfile(self.env() + '/etc/testpkg-with-file-conflict/test.txt'))
    self.assert_true(os.path.isfile(self.env() + '/app/etc/testpkg-with-file-conflict/test.txt'))
def run(self):
    """ Run command """
    RootRequired.require_root_permission()

    for f in os.listdir(Env.cache_dir()):
        if os.path.isfile(Env.cache_dir('/' + f)):
            if self.is_verbose():
                pr.p('removing ' + Env.cache_dir('/' + f) + '...')
            os.remove(Env.cache_dir('/' + f))

    for f in os.listdir(Env.cache_dir('/archives')):
        if os.path.isfile(Env.cache_dir('/archives/' + f)):
            if self.is_verbose():
                pr.p('removing ' + Env.cache_dir('/archives/' + f) + '...')
            os.remove(Env.cache_dir('/archives/' + f))

    pr.p(ansi.green + 'Cache files cleared successfully' + ansi.reset)
def run():
    """ Start running tests """
    print('Starting test system...')
    print('=======================')

    # load test environment
    print('Loading test environment...', end=' ')
    load_test_env()
    print(ansi.green + 'created in ' + Env.base_path() + ansi.reset)
    print()

    # enable testing mode
    pr.is_testing = True
    SysArch.is_testing = True

    # load tests list
    tests_list = os.listdir('tests/items')

    # clean up tests list
    orig_tests = []
    for test in tests_list:
        if test[len(test)-3:] == '.py':
            exec('from items.' + test[:len(test)-3] + ' import ' + test[:len(test)-3])
            exec("orig_tests.append(" + test[:len(test)-3] + "())")

    # start running tests
    count = 0
    for test in orig_tests:
        test_name = test.get_name()
        print('\t[' + str(count + 1) + '/' + str(len(orig_tests)) + ']\t'
              + test_name.replace('_', ' ') + ': ', end='', flush=True)
        test.refresh_env()
        test.run()
        test.refresh_env()
        print(ansi.green + 'PASS' + ansi.reset)
        count += 1

    print()
    print(ansi.green + 'All ' + str(count) + ' tests passed successfully')
    print('Cleaning up...' + ansi.reset)
    if os.path.isfile('testpkgc-1.0.cati'):
        os.remove('testpkgc-1.0.cati')
    shutil.rmtree(Env.base_path_dir)
    Temp.clean()
def run(self):
    """ Run command """
    RootRequired.require_root_permission()

    pr.p('Loading packages list...')
    pr.p('========================')

    loaded_packages = []
    for argument in self.arguments:
        arg_parts = argument.split('=')
        if len(arg_parts) == 1:
            # load the last version as default
            pkg = Pkg.load_last(argument)
        else:
            # load the specified version
            pkg = Pkg.load_version(arg_parts[0], arg_parts[1])
            if pkg == 1:
                pkg = False
            elif pkg == 2:
                self.message('package "' + arg_parts[0] + '" does not have version "' + arg_parts[1] + '"'
                             + ansi.reset, before=ansi.red)
                continue
            else:
                pkg.only_specify_version = True
        if pkg:
            try:
                pkg.only_specify_version
            except:
                pkg.only_specify_version = False
            if pkg.installed():
                if not pkg.only_specify_version:
                    self.message('package "' + argument + '" is installed. cannot forget installed packages'
                                 + ansi.reset, before=ansi.red)
                    continue
                else:
                    if pkg.installed() == pkg.data['version']:
                        self.message('package ' + argument + ' (' + pkg.data['version']
                                     + ') is installed. cannot forget installed packages'
                                     + ansi.reset, before=ansi.red)
                        continue
            loaded_packages.append(pkg)
        else:
            self.message('unknown package "' + argument + '"' + ansi.reset, before=ansi.red)

    if not loaded_packages:
        return 1

    # forget loaded packages
    for pkg in loaded_packages:
        if not pkg.only_specify_version:
            # forget all of the package versions
            shutil.rmtree(Env.packages_lists('/' + pkg.data['name']))
            pr.p('Package ' + pkg.data['name'] + ' was forgotten successfully')
        else:
            files = glob.glob(Env.packages_lists('/' + pkg.data['name'] + '/' + pkg.data['version'] + '-*'))
            for f in files:
                if not '-' in f[len(Env.packages_lists('/' + pkg.data['name'] + '/' + pkg.data['version'] + '-')):]:
                    os.remove(f)
            pr.p('Version ' + pkg.data['version'] + ' of package ' + pkg.data['name']
                 + ' was forgotten successfully')
            try:
                if len(os.listdir(Env.packages_lists('/' + pkg.data['name']))) <= 1:
                    shutil.rmtree(Env.packages_lists('/' + pkg.data['name']))
            except:
                pass

    ListUpdater.update_indexes({
        'cannot_read_file': self.empty_method_for_event,
        'invalid_json_data': self.empty_method_for_event,
    })
def install(self, pkg: BaseArchive, index_updater_events: dict, installer_events: dict,
            is_manual=True, run_scripts=True, target_path='', keep_conffiles=False,
            check_security_blacklist=True):
    """
    Installs a .cati package

    Args:
        pkg (BaseArchive): the package archive object
        index_updater_events (dict): events that will be passed to
            `dotcati.ListUpdater.update_indexes()`
        installer_events (dict): the events
            - package_currently_installed: gets the currently installed version
            - package_new_installs: gets the package archive
            - package_installed: will be called after the package installation
            - dep_and_conflict_error: will be run when there is a depends or conflict error
            - arch_error: will be run when the package arch does not match the system arch
            - directory_not_empty: will be run when an old directory of the package is not empty
        is_manual (bool): whether the package is installed as manual or not (default is True, meaning manual)
        run_scripts (bool): run the package install scripts or not (default is True)
        target_path (str): the target path prefix for installed files (will be passed to `self.copy_files()`)
        keep_conffiles (bool): keep existing config files if they were changed (default is False)
        check_security_blacklist (bool): check whether the package is in the security blacklist or not
    """
    self.conffiles = pkg.get_conffiles()
    self.pkg = pkg
    self.keep_conffiles = keep_conffiles
    self.uncopied_conffiles = {}

    # check whether the package is in the security blacklist
    if check_security_blacklist:
        self.check_security_blacklist(pkg)

    # check package architecture
    if not pkg.data['arch'] in SysArch.allowed_archs():
        return installer_events['arch_error'](pkg)

    # add package data to lists
    self.add_package_to_lists(pkg, index_updater_events)

    # check package dependencies and conflicts
    try:
        self.check_dep_and_conf(pkg)
    except DependencyError as ex:
        return installer_events['dep_and_conflict_error'](pkg, ex)
    except ConflictError as ex:
        return installer_events['dep_and_conflict_error'](pkg, ex)

    # load old conffiles
    self.old_conffiles = []
    try:
        f = open(Env.installed_lists('/' + pkg.data['name'] + '/conffiles'), 'r')
        content = f.read()
        f.close()
        tmp = content.strip().split('\n')
        self.old_conffiles = [item.strip().split('@') for item in tmp]
    except:
        pass

    # extract package in a temp place
    temp_dir = Temp.make_dir()
    os.rmdir(temp_dir)
    try:
        pkg.extractall(temp_dir)
    except IsADirectoryError:
        pass
    self.extracted_package_dir = temp_dir

    # install package
    if Pkg.is_installed(pkg.data['name']):
        installer_events['package_currently_installed'](pkg, Pkg.installed_version(pkg.data['name']))
    else:
        installer_events['package_new_installs'](pkg)

    if run_scripts:
        self.run_script('ins-before')

    copied_files = self.copy_files(pkg, installer_events['directory_not_empty'], target_path)

    # set install configuration
    if not os.path.isdir(Env.installed_lists('/' + pkg.data['name'])):
        os.mkdir(Env.installed_lists('/' + pkg.data['name']))

    f_ver = open(Env.installed_lists('/' + pkg.data['name'] + '/ver'), 'w')
    f_ver.write(pkg.data['version'])  # write installed version
    f_ver.close()

    # write copied files list
    f_files = open(Env.installed_lists('/' + pkg.data['name'] + '/files'), 'w')
    copied_files_str = ''
    for copied_file in copied_files:
        copied_files_str += copied_file + '\n'
    f_files.write(copied_files_str.strip())  # write copied files
    f_files.close()

    # write conffiles list
    f_conffiles = open(Env.installed_lists('/' + pkg.data['name'] + '/conffiles'), 'w')
    copied_conffiles_str = ''
    for copied_conffile in copied_files:
        if copied_conffile.split(':')[0] == 'cf':
            try:
                conffile_hash = self.uncopied_conffiles[copied_conffile.split(':', 1)[-1]]
            except:
                conffile_hash = calc_file_sha256(Env.base_path(copied_conffile.split(':', 1)[-1]))
            copied_conffiles_str += conffile_hash + '@' + copied_conffile.split(':', 1)[-1] + '\n'
    f_conffiles.write(copied_conffiles_str.strip())  # write copied conffiles
    f_conffiles.close()

    # copy `any` script
    if os.path.isfile(self.extracted_package_dir + '/scripts/any'):
        os.system('cp "' + self.extracted_package_dir + '/scripts/any' + '" "'
                  + Env.any_scripts('/' + pkg.data['name']) + '"')

    # save static files list
    static_files_list = pkg.get_static_files()
    f_static_files = open(Env.installed_lists('/' + pkg.data['name'] + '/staticfiles'), 'w')
    static_files_str = ''
    for copied_file in copied_files:
        copied_file_path = copied_file.split(':', 1)[1]
        if copied_file_path in static_files_list:
            if os.path.isfile(Env.base_path('/' + copied_file_path)):
                # calculate file sha256 sum
                copied_file_sha256 = calc_file_sha256(Env.base_path('/' + copied_file_path))
                # add file to list
                static_files_str += copied_file_sha256 + '@' + copied_file_path + '\n'
    f_static_files.write(static_files_str.strip())  # write static files
    f_static_files.close()

    f_installed_at = open(Env.installed_lists('/' + pkg.data['name'] + '/installed_at'), 'w')
    f_installed_at.write(str(time.time()))  # write time (installed at)
    f_installed_at.close()

    if is_manual:
        f_manual = open(Env.installed_lists('/' + pkg.data['name'] + '/manual'), 'w')
        f_manual.write('')
        f_manual.close()

    if run_scripts:
        self.run_script('ins-after')

    # copy remove scripts
    if os.path.isfile(self.extracted_package_dir + '/scripts/rm-before'):
        os.system('cp "' + self.extracted_package_dir + '/scripts/rm-before' + '" "'
                  + Env.installed_lists('/' + pkg.data['name'] + '/rm-before') + '"')
    if os.path.isfile(self.extracted_package_dir + '/scripts/rm-after'):
        os.system('cp "' + self.extracted_package_dir + '/scripts/rm-after' + '" "'
                  + Env.installed_lists('/' + pkg.data['name'] + '/rm-after') + '"')

    # pop package from state
    BaseTransaction.pop_state()

    # call package installed event
    installer_events['package_installed'](pkg)
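# Summary of the per-package metadata files written by install() under
# Env.installed_lists('/<package-name>/'), as created above:
#   ver          - installed version
#   files        - copied files list ('<type>:<path>' per line)
#   conffiles    - '<sha256>@<path>' per conffile
#   staticfiles  - '<sha256>@<path>' per static file
#   installed_at - install timestamp
#   manual       - empty marker file, present only for manually installed packages
#   rm-before / rm-after - package remove scripts, if the package ships them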
def run(self):
    """ Run command """
    RootRequired.require_root_permission()

    pr.p('Loading packages list...')
    pr.p('========================')

    loaded_packages = []
    for argument in self.arguments:
        arg_parts = argument.split('=')
        if len(arg_parts) == 1:
            # load the last version as default
            pkg = Pkg.load_last(argument)
        else:
            # load the specified version
            pkg = Pkg.load_version(arg_parts[0], arg_parts[1])
            if pkg == 1:
                pkg = False
            elif pkg == 2:
                self.message('package "' + arg_parts[0] + '" does not have version "' + arg_parts[1] + '"'
                             + ansi.reset, before=ansi.red)
                continue
        if pkg:
            loaded_packages.append(pkg)
        else:
            self.message('unknown package "' + argument + '"' + ansi.reset, before=ansi.red)

    # remove local packages from list
    new_loaded_packages = []
    for pkg in loaded_packages:
        try:
            file_path = pkg.data['file_path']
            new_loaded_packages.append(pkg)
        except:
            self.message('package "' + pkg.data['name'] + '" is a local package', is_error=True)
    loaded_packages = new_loaded_packages

    if not loaded_packages:
        return 1

    # calculate transactions
    pr.p('Calculating transactions...')
    calc = Calculator(with_recommends=self.has_option('--with-recommends'))
    i = 0
    while i < len(loaded_packages):
        loaded_packages[i].is_manual = True
        i += 1
    try:
        calc.install(list(reversed(loaded_packages)))
    except:
        pr.e(ansi.red + 'ERROR: There are some dependency problems.' + ansi.reset)
        return 1

    # handle re-installable packages
    i = 0
    while i < len(calc.to_install):
        if calc.to_install[i].installed():
            if calc.to_install[i].installed() == calc.to_install[i].wanted_version:
                if not self.has_option('--reinstall'):
                    pr.p('Package ' + calc.to_install[i].data['name'] + '='
                         + calc.to_install[i].wanted_version
                         + ' is currently installed. use --reinstall option to re-install it.')
                    if calc.to_install[i].is_manual:
                        try:
                            pr.p('Setting it as manual installed package...')
                            manual_f = open(
                                Env.installed_lists('/' + calc.to_install[i].data['name'] + '/manual'), 'w')
                            manual_f.write('')
                            manual_f.close()
                        except:
                            pass
                    calc.to_install.pop(i)
                    # the next item has shifted into index i, so do not advance the index
                    continue
        i += 1

    # check whether any transactions exist
    if not calc.has_any_thing():
        pr.p('Nothing to do.')
        return 0

    # show transaction
    TransactionShower.show(calc)

    if not (self.has_option('-y') or self.has_option('--yes')):
        pr.p('Do you want to continue? [Y/n] ', end='')
        answer = input()
        if answer == 'y' or answer == 'Y' or answer == '':
            pass
        else:
            pr.p('Abort.')
            return 1

    # start downloading packages
    pr.p('Downloading packages...')
    downloaed_paths = []
    for pkg in calc.to_install:
        download_path = Env.cache_dir('/archives/' + pkg.data['name'] + '-' + pkg.wanted_version
                                      + '-' + pkg.data['arch'])
        if os.path.isfile(download_path):
            file_sha256 = calc_file_sha256(download_path)
            file_md5 = calc_file_md5(download_path)
            valid_sha256 = None
            valid_md5 = None
            try:
                valid_sha256 = pkg.data['file_sha256']
            except:
                valid_sha256 = file_sha256
            try:
                valid_md5 = pkg.data['file_md5']
            except:
                valid_md5 = file_md5
            if file_md5 != valid_md5 or file_sha256 != valid_sha256:
                # file is corrupt and should be downloaded again
                os.remove(download_path)
            else:
                pr.p('Using Cache for ' + pkg.data['name'] + ':' + pkg.data['version'] + ':'
                     + pkg.data['arch'] + '...')
                downloaed_paths.append(download_path)
                continue
        download_cmd = DownloadCommand()
        i = 0
        res = 1
        tmp = True
        while tmp:
            if i > 5:
                pr.e(ansi.red + 'Failed to download packages' + ansi.reset)
                return res
            pr.p('Downloading ' + pkg.data['name'] + ':' + pkg.data['version'] + ':'
                 + pkg.data['arch'] + '...')
            res = download_cmd.handle(ArgParser.parse([
                'cati', 'download', '-q',
                pkg.data['name'] + '=' + pkg.wanted_version,
                '--output=' + download_path
            ]))
            if res == 1 or res == None:
                tmp = False
            i += 1
        downloaed_paths.append(download_path)
    pr.p('Download completed.')

    # check --download-only option
    if self.has_option('--download-only'):
        return 0

    cati_pkg_cmd_options = []
    remove_cmd_options = []
    if self.has_option('--keep-conffiles'):
        cati_pkg_cmd_options.append('--keep-conffiles')
    if self.has_option('--target'):
        cati_pkg_cmd_options.append('--target')
    if self.has_option('--without-scripts'):
        cati_pkg_cmd_options.append('--without-scripts')
        remove_cmd_options.append('--without-scripts')

    # remove packages
    if calc.to_remove:
        pr.p('Removing packages...')
        package_names = [pkg.data['name'] for pkg in calc.to_remove]
        remove_cmd = RemoveCommand()
        res = remove_cmd.handle(ArgParser.parse([
            'cati', 'remove', *package_names, '-y', *remove_cmd_options
        ]))
        if res != 0 and res != None:
            pr.e(ansi.red + 'Failed to remove packages' + ansi.reset)
            return res

    # install packages
    pr.p('Installing packages...')
    pkg_cmd = PkgCommand()
    res = pkg_cmd.handle(ArgParser.parse([
        'cati', 'pkg', 'install', *downloaed_paths, *cati_pkg_cmd_options
    ]))
    if res != 0 and res != None:
        self.set_manual_installs(calc.to_install)
        pr.e(ansi.red + 'Failed to install packages' + ansi.reset)
        return res

    self.set_manual_installs(calc.to_install)

    pr.p(ansi.green + 'Done.' + ansi.reset)
def copy_files(self, pkg: BaseArchive, directory_not_empty_event, target_path='') -> list:
    """
    Copies the package files onto the system

    Args:
        pkg (BaseArchive): the package archive object
        directory_not_empty_event (callable): the function that will be run when we want to
            delete an old directory of the package and it is not empty.
        target_path (str): target path prefix of the copied files location. default is `/`,
            meaning files are copied into the root directory. you can change it.

    Returns:
        list[str]: list of copied files
    """
    # load package old files list
    old_files = []
    if os.path.isfile(Env.installed_lists('/' + pkg.data['name'] + '/files')):
        try:
            f = open(Env.installed_lists('/' + pkg.data['name'] + '/files'), 'r')
            for line in f.read().strip().split('\n'):
                if line != '':
                    old_files.append(line.strip())
        except:
            pass
    old_files = list(reversed(old_files))

    # load unremoved conffiles list
    unremoved_conffiles_f = open(Env.unremoved_conffiles(), 'r')
    unremoved_conffiles = unremoved_conffiles_f.read().strip().split('\n')
    unremoved_conffiles_f.close()

    temp_dir = self.extracted_package_dir

    # load files list from the `files` directory of the package
    self.loaded_files = []
    self.load_files(temp_dir + '/files', temp_dir + '/files')
    self.loaded_files = [[target_path + f[0], f[1]] for f in self.loaded_files]

    # check file conflicts
    all_installed_files = Pkg.get_all_installed_files_list()
    for lf in self.loaded_files:
        if not os.path.isdir(lf[1]):
            for insf in all_installed_files:
                if insf[0].split(':', 1)[0] != pkg.data['name']:
                    if lf[0] == insf[2]:
                        insf_pkg = Pkg.load_last(insf[0])
                        if insf_pkg:
                            insf[0] = insf[0] + ':' + insf_pkg.installed()

                        # check whether the package is in the `replaces` list
                        do_raise_error = True
                        replaces_list = pkg.get_replaces()
                        for rep in replaces_list:
                            if Pkg.check_state(rep):
                                tmp_parts = rep.split(' ')
                                tmp_parts = tmp_parts[0].split('>')
                                tmp_parts = tmp_parts[0].split('<')
                                tmp_parts = tmp_parts[0].split('=')
                                tmp_parts = tmp_parts[0]
                                if tmp_parts == insf[0].split(':', 1)[0]:
                                    do_raise_error = False
                        if do_raise_error:
                            raise FileConflictError(
                                'package ' + pkg.data['name'] + ':' + pkg.data['version'] + ' and '
                                + insf[0] + ' both have file "' + lf[0] + '"')

    # copy loaded files
    self.copied_files = []
    for f in self.loaded_files:
        if os.path.exists(Env.base_path(f[0])):
            if os.path.isfile(Env.base_path(f[0])):
                if ('f:' + f[0]) in old_files or ('cf:' + f[0]) in old_files:
                    self.copy_once_file(f)
                    try:
                        old_files.pop(old_files.index(('f:' + f[0])))
                    except:
                        pass
                else:
                    if f[0] in unremoved_conffiles:
                        self.copy_once_file(f)
                        unremoved_conffiles.pop(unremoved_conffiles.index(f[0]))
                    else:
                        self.copy_once_file(f)
            else:
                if ('d:' + f[0]) in old_files or ('cd:' + f[0]) in old_files:
                    if ('cd:' + f[0]) in old_files:
                        self.copied_files.append('cd:' + f[0])
                        old_files.pop(old_files.index(('cd:' + f[0])))
                    else:
                        self.copied_files.append('d:' + f[0])
                        old_files.pop(old_files.index(('d:' + f[0])))
                else:
                    if f[0] in unremoved_conffiles:
                        self.copied_files.append('d:' + f[0])
                        unremoved_conffiles.pop(unremoved_conffiles.index(f[0]))
        else:
            self.copy_once_file(f)

    # delete unwanted old files
    for item in old_files:
        parts = item.strip().split(':', 1)
        if parts[0] == 'cf' or parts[0] == 'cd':
            pass
        else:
            if os.path.isfile(parts[1]):
                os.remove(parts[1])
            else:
                try:
                    os.rmdir(parts[1])
                except:
                    # directory is not empty
                    directory_not_empty_event(pkg, parts[1])

    # write new unremoved conffiles list
    unremoved_conffiles_f = open(Env.unremoved_conffiles(), 'w')
    new_content = ''
    for item in unremoved_conffiles:
        new_content += item + '\n'
    unremoved_conffiles_f.write(new_content)
    unremoved_conffiles_f.close()

    return self.copied_files
def run(self):
    """ Run command """
    # require root permission
    require_root_permission()

    result_code = 0
    packages_to_reinstall = []

    if not self.is_quiet():
        pr.p('Starting checking system health and security...')
        pr.p('===============================================')

    # check state
    state_cmd = StateCommand()
    out = state_cmd.handle(ArgParser.parse(['cati', 'state']))
    if out > 0:
        return out

    # search for conflict and dependency corruptions
    if not self.is_quiet():
        pr.p('Checking dependency and conflict corruptions...')
    dependency_problems = []
    conflict_problems = []
    installed_packages = Pkg.installed_list()['list']
    for pkg in installed_packages:
        if self.is_verbose():
            pr.p('[info] checking dependencies and conflicts for ' + pkg.data['name'] + '...')
        for dp in pkg.get_depends():
            if not Pkg.check_state(dp):
                dependency_problems.append([pkg, dp])
        for conflict in pkg.get_conflicts():
            if Pkg.check_state(conflict):
                conflict_problems.append([pkg, conflict])
    if dependency_problems or conflict_problems:
        for depend in dependency_problems:
            pr.p(ansi.red + 'dependency problem for ' + depend[0].data['name'] + ': ' + depend[1] + ansi.reset)
            packages_to_reinstall.append(depend[0])
        for conflict in conflict_problems:
            pr.p(ansi.red + 'conflict problem for ' + conflict[0].data['name'] + ': ' + conflict[1] + ansi.reset)
            packages_to_reinstall.append(conflict[0])
        result_code = 1
    else:
        pr.p(ansi.green + 'There are no conflict or dependency problems and everything is ok' + ansi.reset)

    # check static files
    if not self.is_quiet():
        pr.p('Checking packages static files...')
    staticfile_problems = []
    for pkg in installed_packages:
        if self.is_verbose():
            pr.p('[info] checking static files for ' + pkg.data['name'] + '...')
        files = pkg.installed_static_files()
        for f in files:
            f[1] = Env.base_path(f[1])
            if os.path.isfile(f[1]):
                wanted_hash = f[0]
                current_hash = calc_file_sha256(f[1])
                if wanted_hash != current_hash:
                    staticfile_problems.append([pkg, f, 'file is changed'])
            else:
                staticfile_problems.append([pkg, f, 'file is deleted'])
    if staticfile_problems:
        for problem in staticfile_problems:
            pr.p(ansi.red + 'staticfile problem in package ' + problem[0].data['name'] + ': '
                 + problem[1][1] + ': ' + problem[2] + ansi.reset)
            packages_to_reinstall.append(problem[0])
        result_code = 1
    else:
        pr.p(ansi.green + 'all of the static files are ok' + ansi.reset)

    # check repos config files health
    if not self.is_quiet():
        pr.p('Checking cati configuration files...')
    if self.is_verbose():
        pr.p('[info] checking repositories config...')
    repos = Repo.get_list()
    pr.p(ansi.red, end='')
    ReposListErrorShower.show(repos)
    pr.p(ansi.reset, end='')
    is_any_repo_error = False
    for repo in repos:
        if repo.syntax_errors:
            is_any_repo_error = True
            result_code = 1
    if not is_any_repo_error:
        pr.p(ansi.green + 'all of the cati configuration files are ok' + ansi.reset)

    # check database files
    if not self.is_quiet():
        pr.p('Checking cati database...')
    database_problems = []
    for f in os.listdir(Env.installed_lists()):
        if self.is_verbose():
            pr.p('[info] checking database install dir for ' + f + '...')
        if not os.path.isfile(Env.installed_lists('/' + f + '/files')) \
                or not os.path.isfile(Env.installed_lists('/' + f + '/ver')):
            database_problems.append('installed packages database: directory '
                                     + Env.installed_lists('/' + f) + ' is corrupt')
    for f in os.listdir(Env.security_blacklist()):
        if self.is_verbose():
            pr.p('[info] checking security blacklist part ' + f + '...')
        if not os.path.isfile(Env.security_blacklist('/' + f)):
            database_problems.append('security blacklist: a directory detected: ' + Env.security_blacklist('/' + f))
        else:
            tmp = open(Env.security_blacklist('/' + f), 'r')
            try:
                json.loads(tmp.read())
            except:
                database_problems.append('security blacklist: invalid json data in ' + Env.security_blacklist('/' + f))
    if database_problems:
        for problem in database_problems:
            pr.p(ansi.red + 'database: ' + problem + ansi.reset)
        result_code = 1
    else:
        pr.p(ansi.green + 'all of the cati database is ok' + ansi.reset)

    if not self.is_quiet():
        if packages_to_reinstall:
            pr.p(ansi.blue + 'We suggest re-installing these packages:')
            for pkg in packages_to_reinstall:
                pr.p('- ' + pkg.data['name'])
            if not self.has_option('--autofix'):
                pr.p('use --autofix option to re-install them or do this manually')
                pr.p(ansi.reset, end='')
            else:
                pr.p(ansi.reset, end='')
                packages_names = [pkg.data['name'] for pkg in packages_to_reinstall]
                install_cmd = InstallCommand()
                args = ['cati', 'install', '--reinstall', *packages_names]
                cmd_str = ''
                for arg in args:
                    cmd_str += arg + ' '
                cmd_str = cmd_str.strip()
                pr.p(cmd_str)
                return install_cmd.handle(ArgParser.parse(args))

    return result_code
def run(self):
    """ Run command """
    RootRequired.require_root_permission()

    if not self.is_quiet():
        pr.p('Loading repositories list...')
    repos = Repo.get_list()
    ReposListErrorShower.show(repos)

    if not self.is_quiet():
        pr.p('Preparing to update repos...')
    orig_repos = []
    for repo in repos:
        if repo.successful_loaded:
            if repo.test():
                if not repo.is_disable:
                    orig_repos.append(repo)
                else:
                    if not self.is_quiet():
                        self.message('Warning: ignoring repository "' + repo.name + '" because it is disabled')
            else:
                pr.e(ansi.red + 'Cannot make connection to repo "' + repo.full_string + '"' + ansi.reset)

    if not self.is_quiet():
        pr.p('Updating repositories...')
        pr.p('=============================')

    # downloaded repos data files paths
    downloaded_paths = []

    # update repos
    for repo in list(reversed(orig_repos)):
        if not self.is_quiet():
            pr.p('Fetching ' + repo.name + ' (' + repo.url + ') data...')
        data = repo.get_data(download_event=self.download_event)
        if type(data) == int:
            pr.e(ansi.red + 'Cannot update ' + repo.name + ' (' + repo.url + '): error code '
                 + str(data) + ansi.reset)
        elif isinstance(data, Exception):
            pr.e(ansi.red + 'Cannot update ' + repo.name + ' (' + repo.url + '): ' + str(data) + ansi.reset)
        else:
            # validate data
            try:
                tmp = json.loads(data)
                # save data in a file
                path = Env.cache_dir('/' + repo.name + '-' + str(time.time()) + str(random.random())) + '.json'
                f = open(path, 'w')
                f.write(data)
                f.close()
                downloaded_paths.append(path)
            except:
                pr.e(ansi.red + 'Cannot update ' + repo.name + ' (' + repo.url
                     + '): invalid json data received' + ansi.reset)

    if not self.is_quiet():
        pr.p('Updating packages list...')

    # load downloaded data
    packages = []
    for path in downloaded_paths:
        f = open(path, 'r')
        data = f.read().strip()
        f.close()
        items = json.loads(data)
        for item in items:
            if PackageJsonValidator.validate(item):
                packages.append(item)
            else:
                pass

    for pkg in packages:
        if PackageJsonValidator.validate(pkg):
            if self.is_verbose():
                pr.p('adding ' + pkg['name'] + ':' + pkg['version'] + ':' + pkg['arch'] + '...')
            # write package on list
            if not os.path.isdir(Env.packages_lists('/' + pkg['name'])):
                os.mkdir(Env.packages_lists('/' + pkg['name']))
            try:
                f = open(Env.packages_lists('/' + pkg['name'] + '/' + pkg['version'] + '-' + pkg['arch']), 'w')
                f.write(json.dumps(pkg))
                f.close()
                ListUpdater.index_reverse_depends_and_conflicts(Pkg(pkg))
            except:
                pr.e(ansi.red + 'error while adding ' + pkg['name'] + ':' + pkg['version'] + ':'
                     + pkg['arch'] + ansi.reset)
        else:
            if self.is_verbose():
                pr.p(ansi.yellow + 'invalid json data in an item. ignored...' + ansi.reset)

    if not self.is_quiet():
        pr.p('Finishing update...')

    ListUpdater.update_indexes({
        'cannot_read_file': self.empty_method,
        'invalid_json_data': self.empty_method,
    })

    pr.p(ansi.green + 'Done.' + ansi.reset)
def sub_install(self):
    """ The install sub command (cati pkg install) """
    if len(self.arguments) <= 1:
        self.message('argument package file(s) required')
        return 1

    require_root_permission()

    # check transactions state before running new transactions
    pr.p('Checking transactions state...')
    state_list = BaseTransaction.state_list()  # get list of undone transactions
    if state_list:
        # the list is not empty
        StateContentShower.show(state_list)
        return 1

    packages_to_install = []
    i = 1
    while i < len(self.arguments):
        try:
            pkg = archive_factory(self.arguments[i], 'r')
            pkg.read()
            pkg.package_file_path = os.path.abspath(self.arguments[i])
            packages_to_install.append(pkg)
        except FileNotFoundError as ex:
            self.message('file "' + self.arguments[i] + '" not found' + ansi.reset, before=ansi.red)
            return 1
        except:
            self.message('cannot open "' + self.arguments[i] + '": file is corrupt' + ansi.reset, before=ansi.red)
            return 1
        i += 1

    # add packages to state
    state_f = open(Env.state_file(), 'w')
    tmp = ''
    for pkg in packages_to_install:
        tmp += ('install%' + pkg.data['name'] + '%' + pkg.data['version'] + '%' + pkg.data['arch']
                + '%' + pkg.package_file_path + '\n')
    state_f.write(tmp)
    state_f.close()

    packages_to_install_names_and_versions = [
        pkg.data['name'] + '@' + pkg.data['version'] for pkg in packages_to_install
    ]

    i = 0
    while i < len(packages_to_install):
        try:
            pkg = packages_to_install[i]
            tmp = self.install_once(pkg)
            if type(tmp) == int and tmp != 0:
                if not self.has_option('--dont-ignore-state'):
                    BaseTransaction.finish_all_state()
                return tmp
            pkg.close()
        except:
            if not self.has_option('--dont-ignore-state'):
                BaseTransaction.finish_all_state()
            self.message('cannot install "' + packages_to_install[i].data['name'] + '"' + ansi.reset,
                         before=ansi.red)
            return 1
        i += 1

    BaseTransaction.run_any_scripts(
        ['install', packages_to_install_names_and_versions],
        events={
            'start_run_script': self.start_run_any_script_event,
        })

    BaseTransaction.finish_all_state()
def env(self, path=''):
    return Env.base_path(path)
def run(pkg: Pkg, events: dict, remove_conffiles=False, run_scripts=True):
    """ Removes the pkg """
    events['removing_package'](pkg)

    # run rm-before script
    if run_scripts:
        if os.path.isfile(Env.installed_lists('/' + pkg.data['name'] + '/rm-before')):
            os.system('chmod +x "' + Env.installed_lists('/' + pkg.data['name'] + '/rm-before') + '"')
            with_conffiles_arg = 'without-conffiles'
            if remove_conffiles:
                with_conffiles_arg = 'with-conffiles'
            os.system(Env.installed_lists('/' + pkg.data['name'] + '/rm-before') + ' ' + with_conffiles_arg)

    # remove package
    installed_files = open(Env.installed_lists('/' + pkg.data['name'] + '/files'), 'r').read()
    installed_files = installed_files.strip().split('\n')
    for f in list(reversed(installed_files)):
        if f != '':
            f_type = f.strip().split(':', 1)[0]
            f_path = f.strip().split(':', 1)[1]
            if f_type == 'f':
                if os.path.isfile(Env.base_path(f_path)):
                    os.remove(Env.base_path(f_path))
            elif f_type == 'd':
                try:
                    os.rmdir(Env.base_path(f_path))
                except:
                    events['dir_is_not_empty'](pkg, f)
            elif f_type == 'cf':
                if remove_conffiles:
                    if os.path.isfile(Env.base_path(f_path)):
                        os.remove(Env.base_path(f_path))
                else:
                    Remove.add_to_unremoved_conffiles(pkg, f_path)
            elif f_type == 'cd':
                if remove_conffiles:
                    try:
                        os.rmdir(Env.base_path(f_path))
                    except:
                        events['dir_is_not_empty'](pkg, f)
                else:
                    Remove.add_to_unremoved_conffiles(pkg, f_path)

    # run rm-after script
    if run_scripts:
        if os.path.isfile(Env.installed_lists('/' + pkg.data['name'] + '/rm-after')):
            with_conffiles_arg = 'without-conffiles'
            if remove_conffiles:
                with_conffiles_arg = 'with-conffiles'
            os.system('chmod +x "' + Env.installed_lists('/' + pkg.data['name'] + '/rm-after') + '"')
            os.system(Env.installed_lists('/' + pkg.data['name'] + '/rm-after') + ' ' + with_conffiles_arg)

    # remove installation config
    shutil.rmtree(Env.installed_lists('/' + pkg.data['name']))

    # remove `any` script
    if os.path.isfile(Env.any_scripts('/' + pkg.data['name'])):
        os.remove(Env.any_scripts('/' + pkg.data['name']))

    events['package_remove_finished'](pkg)
def index_reverse_depends_and_conflicts(pkg: Pkg):
    """
    Packages have `depends` and `conflicts`, but they also have *reverse* depends and
    conflicts. Reverse depends/conflicts should be indexed, because loading them in real
    time is a heavy process. We index them in one place, and whenever a package is added
    or changed, this function should be called.

    Args:
        pkg (Pkg): the changed/added package (the reverse depends/conflicts will be set
            for the packages this package is related to)
    """
    # load the packages
    depend_pkgs = []
    conflict_pkgs = []
    for depend in pkg.get_depends():
        query_parts = Pkg.check_state(depend, only_parse=True)
        for depth1 in query_parts:
            for depth2 in depth1:
                depend_pkgs.append(depth2[0])
    for conflict in pkg.get_conflicts():
        query_parts = Pkg.check_state(conflict, only_parse=True)
        for depth1 in query_parts:
            for depth2 in depth1:
                conflict_pkgs.append(depth2[0])

    # set reverse depends for found packages
    for p in depend_pkgs:
        f_path = Env.packages_lists('/' + p + '/reverse_depends')
        current_list = None
        try:
            if not os.path.isdir(Env.packages_lists('/' + p)):
                os.mkdir(Env.packages_lists('/' + p))
            if not os.path.isfile(f_path):
                current_list = []
            else:
                f = open(f_path, 'r')
                current_list = [
                    item.strip() for item in f.read().strip().split('\n') if item.strip() != ''
                ]
                f.close()
            if not pkg.data['name'] in current_list:
                current_list.append(pkg.data['name'])
            # write new list
            new_list_str = ''
            for item in current_list:
                new_list_str += item + '\n'
            new_list_str = new_list_str.strip()
            f = open(f_path, 'w')
            f.write(new_list_str)
            f.close()
        except:
            pass

    # set reverse conflicts for found packages
    for p in conflict_pkgs:
        f_path = Env.packages_lists('/' + p + '/reverse_conflicts')
        current_list = None
        try:
            if not os.path.isdir(Env.packages_lists('/' + p)):
                os.mkdir(Env.packages_lists('/' + p))
            if not os.path.isfile(f_path):
                current_list = []
            else:
                f = open(f_path, 'r')
                current_list = [
                    item.strip() for item in f.read().strip().split('\n') if item.strip() != ''
                ]
                f.close()
            if not pkg.data['name'] in current_list:
                current_list.append(pkg.data['name'])
            # write new list
            new_list_str = ''
            for item in current_list:
                new_list_str += item + '\n'
            new_list_str = new_list_str.strip()
            f = open(f_path, 'w')
            f.write(new_list_str)
            f.close()
        except:
            pass
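# The reverse_depends / reverse_conflicts files written above simply contain one package
# name per line; for example (hypothetical names), if `foo` depends on `bar`, then
# <packages-lists>/bar/reverse_depends will contain a line 'foo'.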
def finish_all_state():
    """ Clears all of the states """
    f = open(Env.state_file(), 'w')
    f.write('')
    f.close()