def get_list() -> list:
    """ returns list of blacklist items

    Returns:
        list: returns list of blacklist items
    """
    db_parts = os.listdir(Env.security_blacklist())
    blacklist = []
    for part in db_parts:
        try:
            f = open(Env.security_blacklist('/' + part), 'r')
            content = f.read()
            content = json.loads(content)
            f.close()
        except:
            continue
        for item in content:
            try:
                assert type(item['title']) == str
                assert type(item['description']) == str
                assert type(item['md5']) == str
                assert type(item['sha256']) == str
                assert type(item['sha512']) == str
                blacklist.append(item)
            except:
                pass
    return blacklist
def load_version(pkg_name: str, version: str, arch=''):
    """ loads a specific version of a package

    Args:
        pkg_name (str): name of the package
        version (str): the version you want to load
        arch (str): load a specific arch (optional)

    Returns:
        int (1): package not found
        int (2): package found, but version/arch not found
        Pkg object: package and version found and returned
    """
    if not os.path.isfile(Env.packages_lists('/' + pkg_name + '/index')):
        return 1

    # load package index file
    f_index = open(Env.packages_lists('/' + pkg_name + '/index'), 'r')
    index_content = f_index.read()
    f_index.close()

    try:
        # load json
        index = json.loads(index_content)
    except:
        return 1

    # load package versions list
    versions = []
    try:
        versions = index[arch]
    except:
        try:
            versions = index[SysArch.sys_arch()]
            arch = SysArch.sys_arch()
        except:
            try:
                versions = index['all']
                arch = 'all'
            except:
                versions = index[list(index.keys())[0]]
                arch = list(index.keys())[0]

    for ver in versions:
        if ver == version:
            # load this version data
            try:
                f_ver = open(
                    Env.packages_lists('/' + pkg_name + '/' + ver + '-' + arch), 'r')
                f_ver_content = f_ver.read()
                ver_data = json.loads(f_ver_content)
                return Pkg(ver_data)
            except:
                return 2

    return 2
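# A minimal usage sketch (not part of the original source; the package name and
# version below are hypothetical). load_version() mixes return types: the int
# error codes 1 and 2, or a Pkg object on success.
result = load_version('testpkga', '1.0')
if result == 1:
    print('package not found')
elif result == 2:
    print('package found, but the requested version/arch was not found')
else:
    print('loaded ' + result.data['name'] + ' ' + result.data['version'])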
def run(self):
    """ Run command """
    if self.has_option('--edit') or self.has_option('-e'):
        return os.system('vim "' + Env.repos_config() + '"')

    if self.has_option('--add') or self.has_option('-a'):
        RootRequired.require_root_permission()
        repo_string = ''
        for arg in self.arguments:
            repo_string += arg + ' '
        repo_string = repo_string.strip()
        tmp_repo = Repo(repo_string)
        tmp_repo.loaded_from_file = 'argument'
        tmp_repo.line_number = 0
        if not tmp_repo.successful_loaded:
            ReposListErrorShower.show([tmp_repo])
            return 1
        # write repo: find a config file name that does not exist yet
        path = Env.repos_config_dir(
            '/' + tmp_repo.name + '-' + tmp_repo.get_pkg_str() + '-' + tmp_repo.get_arch_str())
        tmp = ''
        tmp_i = 1
        while os.path.isfile(path + tmp):
            tmp = '-' + str(tmp_i)
            tmp_i += 1
        f = open(path + tmp, 'w')
        f.write('# added manually\n' + repo_string)
        f.close()
        return 0

    if self.has_option('--scan'):
        for arg in self.arguments:
            if os.path.isdir(arg):
                Scanner.scan(arg)
                pr.p(ansi.green + 'directory ' + arg + ' scanned successfully' + ansi.reset)
            else:
                self.message('directory ' + arg + ' not found', is_error=True)
        return 0

    # show list of repos
    if not self.is_quiet():
        pr.p('Loading repositories list...')
    repos = Repo.get_list()
    if not self.is_quiet():
        pr.p('============================')
    ReposListErrorShower.show(repos)
    for repo in repos:
        if repo.successful_loaded:
            pr.p(
                repo.name + ': ' + repo.url + ' pkg=' + repo.get_pkg_str()
                + ' arch=' + repo.get_arch_str() + ' channel=' + repo.get_channel_str())
def pop_state():
    """ remove first item from state """
    f = open(Env.state_file(), 'r')
    content = f.read()
    f.close()

    content = content.strip()
    lines = content.split('\n')
    if lines:
        lines.pop(0)

    new_content = ''
    for line in lines:
        new_content += line + '\n'

    f = open(Env.state_file(), 'w')
    f.write(new_content)
    f.close()
def set_manual_installs(self, packages):
    """ Sets installed packages type (manual/auto) """
    for pkg in packages:
        if not pkg.is_manual:
            path = Env.installed_lists('/' + pkg.data['name'] + '/manual')
            if os.path.isfile(path):
                os.remove(path)
def load_from_index(index_json: dict, package_name: str):
    """ Loads package data from index file

    Args:
        package_name (str): name of the package
        index_json (dict): loaded json data from `/var/lib/cati/lists/<pkgname>/index` file

    Returns:
        Pkg: the loaded Pkg object
    """
    try:
        arch = sys_arch()
        versions = index_json[arch]
    except:
        try:
            arch = 'all'
            versions = index_json[arch]
        except:
            arch = list(index_json.keys())[0]
            versions = index_json[arch]

    # load latest version
    ver = Pkg.get_last_version(versions)

    f = open(Env.packages_lists('/' + package_name + '/' + ver + '-' + arch), 'r')
    content = f.read()
    f.close()

    content_json = json.loads(content)
    return Pkg(content_json)
def get_all_installed_files_list() -> list:
    """ returns list of all installed files

    Returns:
        list: [
            ['pkgname', 'filetype(d,f,cd,cf)', '/file/path'],
            ['pkg1', 'f', '/path/to/file'],
            ...
        ]
    """
    # load list of installed packages
    installed_packages = Pkg.installed_list()['list']

    result = []
    for pkg in installed_packages:
        pkg_installed_files = open(
            Env.installed_lists('/' + pkg.data['name'] + '/files'), 'r').read()
        pkg_installed_files = pkg_installed_files.strip().split('\n')
        pkg_installed_files = [
            item.strip().split(':', 1) for item in pkg_installed_files
        ]
        for f in pkg_installed_files:
            if len(f) > 1:
                result.append([pkg.data['name'], f[0], f[1]])

    return result
def state_list():
    """ returns list of undone transactions from the state file """
    f = open(Env.state_file(), 'r')
    content = f.read()
    f.close()

    content = content.strip().split('\n')
    content = [line.strip() for line in content]

    result = []
    for item in content:
        if item != '':
            tmp = {}
            parts = item.split('%')
            tmp['action'] = parts[0]
            tmp['pkg'] = parts[1]
            try:
                tmp['version'] = parts[2]
            except:
                tmp['version'] = None
            try:
                tmp['arch'] = parts[3]
            except:
                tmp['arch'] = None
            try:
                tmp['file'] = parts[4]
            except:
                tmp['file'] = None
            result.append(tmp)

    return result
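# A minimal sketch of the state-file format parsed above (not part of the
# original source). Each line is a '%'-separated record:
#   action%pkg[%version[%arch[%file]]]
# The package names below are taken from the test suite.
example_lines = [
    'install%testpackage1%1.0%amd64',
    'remove%anotherpackage',
]
for line in example_lines:
    parts = line.split('%')
    print({
        'action': parts[0],
        'pkg': parts[1],
        'version': parts[2] if len(parts) > 2 else None,
        'arch': parts[3] if len(parts) > 3 else None,
        'file': parts[4] if len(parts) > 4 else None,
    })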
def get_list() -> list:
    """ returns list of repositories

    Returns:
        list[Repo]: list of loaded repositories
    """
    repos = []
    files = [Env.repos_config()]
    for item in os.listdir(Env.repos_config_dir()):
        if os.path.isfile(Env.repos_config_dir('/' + item)):
            files.append(Env.repos_config_dir('/' + item))

    for fl in files:
        f = open(fl, 'r')
        repos_content = f.read()
        f.close()

        lines = repos_content.split('\n')
        line_counter = 1
        for line in lines:
            line = line.split('#')[0].strip()
            line = line.strip()
            if line != '':
                repo = Repo(line)
                repo.line_number = line_counter
                repo.loaded_from_file = fl
                repos.append(repo)
            line_counter += 1

    # sort by priority (selection sort: repeatedly move repos with the
    # lowest priority value to the front of the sorted list)
    sorted_repos = []
    while len(repos):
        i = 0
        while i < len(repos):
            is_less_than_all = True
            j = 0
            while j < len(repos):
                if int(repos[i].priority) > int(repos[j].priority):
                    is_less_than_all = False
                j += 1
            if is_less_than_all:
                sorted_repos.append(repos[i])
                repos.pop(i)
            i += 1
    return sorted_repos
def is_installed(package_name: str) -> bool:
    """ Gets a package name and checks whether it is installed or not

    Args:
        package_name (str): the package name you want to check

    Returns:
        bool
    """
    try:
        assert os.path.isdir(Env.installed_lists('/' + package_name))
        assert os.path.isfile(Env.installed_lists('/' + package_name + '/ver'))
        assert os.path.isfile(Env.installed_lists('/' + package_name + '/files'))
        return True
    except:
        return False
def add_to_unremoved_conffiles(pkg: Pkg, filepath: str):
    """ Adds filepath to list of unremoved conffiles """
    f = open(Env.unremoved_conffiles(), 'r')
    filelist = f.read().strip().split('\n')
    f.close()

    # add item to list
    if filepath not in filelist:
        filelist.append(filepath)

    # generate new content of unremoved_conffiles file
    new_content = ''
    for item in filelist:
        new_content += item + '\n'

    # write new content to file
    f = open(Env.unremoved_conffiles(), 'w')
    f.write(new_content)
    f.close()
def run(self):
    """ Run test """
    self.assert_equals(
        self.run_command(
            'pkg', ['install', 'repository/test-repository/testpkgc-2.0.cati']), 0)

    state_f = open(Env.state_file(), 'w')
    state_f.write('install%testpackage1%1.0%amd64\nremove%anotherpackage')
    state_f.close()

    self.assert_equals(self.run_command('remove', ['testpkgc', '-y']), 1)
    self.assert_equals(
        self.run_command(
            'pkg', ['install', 'repository/test-repository/testpkgc-2.0.cati']), 1)

    # tests for cli `state` command
    self.assert_equals(self.run_command('state'), 1)
    self.assert_equals(self.run_command('state', ['--abort', '-y']), 0)
    self.assert_equals(self.run_command('state'), 0)

    self.refresh_env()

    self.assert_equals(
        self.run_command(
            'pkg', ['install', 'repository/test-repository/testpkgc-2.0.cati']), 0)
    self.assert_true(Pkg.is_installed('testpkgc'))

    state_f = open(Env.state_file(), 'w')
    state_f.write('remove%testpkgc')
    state_f.close()

    self.assert_equals(self.run_command('state'), 1)
    self.assert_equals(self.run_command('state', ['--complete']), 0)
    self.assert_equals(self.run_command('state'), 0)
    self.assert_true(not Pkg.is_installed('testpkgc'))
def run_any_scripts(runed_transactions: list, events: dict):
    """ run all of the `any` scripts.

    events:
    - start_run_script: will run when starting to run a script (gets the script name)
    """
    runed_transactions_str = ''
    for rt in runed_transactions[1]:
        runed_transactions_str += rt + ' '
    runed_transactions_str = runed_transactions_str.strip()

    scripts = os.listdir(Env.any_scripts())
    for script in scripts:
        events['start_run_script'](script)
        # run script
        os.system('chmod +x "' + Env.any_scripts('/' + script) + '"')
        os.system(
            Env.any_scripts('/' + script) + ' ' + runed_transactions[0]
            + ' ' + runed_transactions_str)
def all_list():
    """ Returns list of packages

    Returns:
        dict: output has two keys: {
            "list": list[Pkg] // list of packages
            "errors": list // errors while loading packages
        }
    """
    errors = []
    packages = []
    tmp_list = os.listdir(Env.packages_lists())
    tmp_list.sort()
    for item in tmp_list:
        if os.path.isfile(Env.packages_lists('/' + item + '/index')):
            f_index = open(Env.packages_lists('/' + item + '/index'), 'r')
            try:
                index_content = f_index.read()
                try:
                    index_json = json.loads(index_content)
                    try:
                        pkg = Pkg.load_from_index(index_json, item)
                        packages.append(pkg)
                    except:
                        errors.append('failed to load package "' + item + '"')
                except:
                    errors.append(
                        'invalid json content in "'
                        + Env.packages_lists('/' + item + '/index') + '"')
            except:
                errors.append(
                    'cannot read file "'
                    + Env.packages_lists('/' + item + '/index') + '"')
        else:
            errors.append(
                f'package "{item}" has no index file in lists '
                f'({Env.packages_lists("/" + item + "/index")} not found)')
    return {'list': packages, 'errors': errors}
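# A minimal usage sketch (not part of the original source):
out = all_list()
for pkg in out['list']:
    print(pkg.data['name'])
for error in out['errors']:
    print('error: ' + error)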
def add_to_state(calc: Calculator):
    """ add new item to state """
    content = ''
    for item in calc.get_sorted_list():
        content += (
            item['action'] + '%' + item['pkg'].data['name'] + '%'
            + item['pkg'].data['version'] + '%' + item['pkg'].data['arch'] + '\n')

    f = open(Env.state_file(), 'w')
    f.write(content)
    f.close()
def require_root_permission(is_cli=True, die_action=None):
    """ checks root permission.

    Args:
        is_cli (bool): if True and the user does not have root permission,
            an error will be printed in the terminal. if it is False,
            `die_action` will be run as a function instead.
            (disabled in the testing environment)
        die_action (callable): the function that will be run when `is_cli` is False
    """
    # if program is in testing mode, don't check permission
    if is_testing:
        return

    if os.getuid() == 0:
        return

    # check write and read access for needed files
    files_to_check = [
        Env.packages_lists(),
        Env.installed_lists(),
        Env.state_file(),
        Env.unremoved_conffiles(),
        Env.security_blacklist(),
        Env.any_scripts(),
        Env.repos_config(),
        Env.repos_config_dir(),
        Env.cache_dir(),
        Env.allowed_archs(),
    ]

    for f in files_to_check:
        if not os.access(f, os.W_OK) or not os.access(f, os.R_OK):
            if is_cli:
                pr.e(ansi.red + sys.argv[0] + ': permission is denied' + ansi.reset)
                pr.exit(1)
                return
            else:
                die_action()
                return
def update_indexes(events: dict):
    """
    This function loads the available versions of a package, indexes them in
    the index file, and does this for all of the packages in lists.

    Args:
        events: (dict) the `events` argument should be a dictionary of functions.
            it is used to handle errors: if something goes wrong, the specific
            function in events will run.
            events:
            - cannot_read_file: if an error happens while reading a file during
              this process, this will run with the file path arg
            - invalid_json_data: if the json content of a file is corrupt, this
              will run with the file path and content args
    """
    require_root_permission()

    for pkg in os.listdir(Env.packages_lists()):
        pkg_index = {}
        if os.path.isdir(Env.packages_lists('/' + pkg)):
            for version in os.listdir(Env.packages_lists('/' + pkg)):
                if version != 'index':
                    if os.path.isfile(Env.packages_lists('/' + pkg + '/' + version)):
                        content = None
                        try:
                            f = open(Env.packages_lists('/' + pkg + '/' + version), 'r')
                            content = f.read()
                        except:
                            events['cannot_read_file'](
                                Env.packages_lists('/' + pkg + '/' + version))
                        if content != None:
                            try:
                                content_json = json.loads(content)
                                try:
                                    tmp = pkg_index[content_json['arch']]
                                    del tmp
                                except:
                                    pkg_index[content_json['arch']] = []
                                pkg_index[content_json['arch']].append(
                                    content_json['version'])
                            except:
                                events['invalid_json_data'](
                                    Env.packages_lists('/' + pkg + '/' + version),
                                    content)
            # write generated index to index file
            f_index = open(Env.packages_lists('/' + pkg + '/index'), 'w')
            f_index.write(json.dumps(pkg_index))
            f_index.close()
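# For illustration only (not part of the original source): the index file that
# update_indexes() writes is a JSON object mapping each arch to the list of
# version strings found for it. The arch/version values below are hypothetical,
# and `json` is assumed to be imported as in the functions above.
example_index = {
    'amd64': ['1.0', '2.0'],
    'all': ['1.5'],
}
print(json.dumps(example_index))  # shape of the <pkg>/index content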
def installed_version(package_name: str) -> str:
    """ Gets the name of a package and returns its installed version

    Args:
        package_name (str): name of package

    Returns:
        str: installed version

    Raises:
        package.exceptions.CannotReadFileException: when package database files cannot be read
    """
    try:
        f = open(Env.installed_lists('/' + package_name + '/ver'), 'r')
        version = f.read()
        f.close()
    except:
        raise CannotReadFileException(
            'cannot read file "'
            + Env.installed_lists('/' + package_name + '/ver') + '"')
    return version
def check(events: dict):
    """ Check all of the files and dirs needed for the cati installation

    Args:
        events:
        - failed_to_repair: will run when the cati installation is corrupt and
          the user does not have root permission to repair it; receives the
          filepath and its type ('file' or 'dir')
    """
    required_files = [
        '/var/lib/cati/state.f',
        '/var/lib/cati/unremoved-conffiles.list',
        '/etc/cati/repos.list',
        '/etc/cati/allowed-architectures.list',
    ]

    required_dirs = [
        '/var',
        '/var/lib',
        '/var/lib/cati',
        '/var/lib/cati/lists',
        '/var/lib/cati/installed',
        '/var/lib/cati/security-blacklist',
        '/var/lib/cati/any-scripts',
        '/var/cache',
        '/var/cache/cati',
        '/var/cache/cati/archives',
        '/etc',
        '/etc/cati',
        '/etc/cati/repos.list.d',
    ]

    for d in required_dirs:
        if not os.path.isdir(Env.base_path('/' + d)):
            repair_once_dir(d, events)

    for f in required_files:
        if not os.path.isfile(Env.base_path('/' + f)):
            repair_once_file(f, events)
def run(self):
    """ Run test """
    self.assert_equals(
        self.run_command('pkg', [
            'install', 'repository/test-repository/simple-test-package.cati'
        ]), 0)
    self.assert_true(os.path.isfile(self.env('/usr/bin/cati-testpkga')))

    self.assert_equals(
        self.run_command('pkg', [
            'install', 'repository/test-repository/simple-test-package.cati'
        ]), 0)
    self.assert_true(os.path.isfile(self.env('/usr/bin/cati-testpkga')))

    pkg = Pkg.load_from_index(
        json.loads(open(Env.packages_lists('/testpkga/index'), 'r').read()),
        'testpkga')
    self.assert_equals(pkg.installed(), '1.0')

    self.refresh_env()

    self.assert_equals(
        self.run_command('pkg', [
            'install',
            'repository/test-repository/testpkg-with-file-conflict-a.cati',
        ]), 0)
    self.assert_true(
        os.path.isfile(self.env() + '/etc/testpkg-with-file-conflict/test.txt'))

    self.refresh_env()
    os.mkdir(self.env() + '/app')
    self.assert_equals(
        self.run_command('pkg', [
            'install',
            'repository/test-repository/testpkg-with-file-conflict-a.cati',
            '--target=/app'
        ]), 0)
    self.assert_true(not os.path.isfile(
        self.env() + '/etc/testpkg-with-file-conflict/test.txt'))
    self.assert_true(
        os.path.isfile(self.env() + '/app/etc/testpkg-with-file-conflict/test.txt'))
def is_installed_manual(package_name: str) -> bool:
    """ Gets a package name and checks whether it is installed MANUALLY or not

    Args:
        package_name (str): the package name you want to check

    Returns:
        bool
    """
    if not Pkg.is_installed(package_name):
        return False
    return os.path.isfile(Env.installed_lists('/' + package_name + '/manual'))
def repair_once_dir(dirpath: str, events: dict):
    """ Repairs one dir

    Args:
        dirpath (str): the dirpath to repair
        events (dict):
        - failed_to_repair
    """
    try:
        os.mkdir(Env.base_path('/' + dirpath))
    except:
        events['failed_to_repair']('/' + dirpath, 'dir')
def copy_once_file(self, paths):
    """ Copy one of the package files (this method is called from `copy_files` method) """
    if os.path.isfile(paths[1]):
        if paths[0] in self.conffiles:
            # conffile ('cf:' prefix)
            self.copied_files.append('cf:' + paths[0])
            old_conffiles = [item[-1] for item in self.old_conffiles]
            if paths[0] in old_conffiles:
                f_hash = calc_file_sha256(paths[1])
                if [f_hash, paths[0]] in self.old_conffiles:
                    self.uncopied_conffiles[paths[0]] = f_hash
                    return
                else:
                    if self.keep_conffiles:
                        self.uncopied_conffiles[paths[0]] = f_hash
                        return
        else:
            # regular file ('f:' prefix)
            self.copied_files.append('f:' + paths[0])
        os.system('cp "' + paths[1] + '" "' + Env.base_path(paths[0]) + '"')
    else:
        # directory ('cd:' for conf dirs, 'd:' for regular dirs)
        os.mkdir(Env.base_path(paths[0]))
        if paths[1] in self.conffiles:
            self.copied_files.append('cd:' + paths[0])
        else:
            self.copied_files.append('d:' + paths[0])
def run(self):
    """ Run command """
    RootRequired.require_root_permission()

    for f in os.listdir(Env.cache_dir()):
        if os.path.isfile(Env.cache_dir('/' + f)):
            if self.is_verbose():
                pr.p('removing ' + Env.cache_dir('/' + f) + '...')
            os.remove(Env.cache_dir('/' + f))

    for f in os.listdir(Env.cache_dir('/archives')):
        if os.path.isfile(Env.cache_dir('/archives/' + f)):
            if self.is_verbose():
                pr.p('removing ' + Env.cache_dir('/archives/' + f) + '...')
            os.remove(Env.cache_dir('/archives/' + f))

    pr.p(ansi.green + 'Cache files cleared successfully' + ansi.reset)
def repair_once_file(filepath: str, events: dict):
    """ Repairs one file

    Args:
        filepath (str): the filepath to repair
        events (dict):
        - failed_to_repair
    """
    try:
        f = open(Env.base_path('/' + filepath).replace('//', '/'), 'w')
        f.write('')
        f.close()
    except:
        events['failed_to_repair']('/' + filepath, 'file')
def run():
    """ start running tests """
    print('Starting test system...')
    print('=======================')

    # load test environment
    print('Loading test environment...', end=' ')
    load_test_env()
    print(ansi.green + 'created in ' + Env.base_path() + ansi.reset)
    print()

    # enable testing mode
    pr.is_testing = True
    SysArch.is_testing = True

    # load tests list
    tests_list = os.listdir('tests/items')

    # clean up tests list
    orig_tests = []
    for test in tests_list:
        if test[len(test) - 3:] == '.py':
            exec('from items.' + test[:len(test) - 3] + ' import ' + test[:len(test) - 3])
            exec("orig_tests.append(" + test[:len(test) - 3] + "())")

    # start running tests
    count = 0
    for test in orig_tests:
        test_name = test.get_name()
        print(
            '\t[' + str(count + 1) + '/' + str(len(orig_tests)) + ']\t'
            + test_name.replace('_', ' ') + ': ', end='', flush=True)
        test.refresh_env()
        test.run()
        test.refresh_env()
        print(ansi.green + 'PASS' + ansi.reset)
        count += 1

    print()
    print(ansi.green + 'All ' + str(count) + ' tests passed successfully')
    print('Cleaning up...' + ansi.reset)
    if os.path.isfile('testpkgc-1.0.cati'):
        os.remove('testpkgc-1.0.cati')
    shutil.rmtree(Env.base_path_dir)
    Temp.clean()
def installed_static_files(self) -> list:
    """ returns the list of installed static files of the package

    Returns:
        list[str]: files paths
    """
    if not self.installed():
        return False

    installed_static_files_list = open(
        Env.installed_lists('/' + self.data['name'] + '/staticfiles'), 'r').read().strip()
    installed_static_files_list = installed_static_files_list.split('\n')
    installed_static_files_list = [
        item.strip().split('@', 1)
        for item in installed_static_files_list if item.strip() != ''
    ]

    return installed_static_files_list
def get_versions_list(self):
    """ returns versions list of the package

    Returns:
        list: list of versions: [
            [<version>, <arch>]
        ]
    """
    try:
        f_index = open(Env.packages_lists('/' + self.data['name'] + '/index'), 'r')
        index_json = json.loads(f_index.read())
        f_index.close()
    except:
        return []

    try:
        versions = []
        for k in index_json:
            for ver in index_json[k]:
                versions.append([ver, k])
        return versions
    except:
        return []
def env(self, path=''):
    return Env.base_path(path)
def check_state(query_string: str, virtual=None, get_false_pkg=False, get_false_pkg_next=0,
                get_true_pkg=False, get_true_pkg_next=0) -> bool:
    """
    Checks package state by query string.

    Examples:
    "somepackage >= 1.5", "somepackage", "somepackage = 2.0", "somepackage < 1.7",
    "pkga | pkgb >= 1.0", "pkga | pkgb | pkgc", "pkga | pkgb & pkgc = 1.0"

    also there is a feature to check files:
    "@/usr/bin/somefile", "somepackage | @/path/to/somefile", "testpkga >= 3.0 | @/somefile",
    "@/file/one | @/file/two",
    "@<sha256-hash>@/path/to/file",
    "@76883f0fd14015c93296f0e4202241f4eb3a23189dbc17990a197477f1dc441a@/path/to/file"

    `virtual` argument:
    this argument can make a virtual package state. for example, package `testpkgz`
    is not installed, but we want to check the query as if it were installed.
    we can add that package to the virtual system so the query checker treats
    the package as installed/removed.

    virtual structure: this is a dictionary:
    {
        'install': [
            ## a list of installed packages:
            ['testpkgx', '1.0'],
            ['testpkgz', '3.7.11'],
            ...
        ],
        'remove': [
            ## a list of removed packages:
            ['testpkgx', '1.0'],
            ['testpkgz', '3.7.11'],
            ...
        ],
        ## set it True if you want to ignore real installations
        'no_real_installs': True/False,
        ## set it True if you want to ignore real not-installations
        'all_real_is_installed': True/False
    }
    """
    # parse query string
    parts = query_string.strip().split('|')
    orig_parts = []
    for part in parts:
        tmp = part.strip().split('&')
        orig_parts.append(tmp)

    # load virtual item
    no_real_installs = False
    all_real_is_installed = False
    if virtual:
        try:
            tmp = virtual['install']
        except:
            virtual['install'] = []
        try:
            tmp = virtual['remove']
        except:
            virtual['remove'] = []
        virtual_installed_names_list = []
        virtual_installed_versions_dict = {}
        for item in virtual['install']:
            virtual_installed_versions_dict[item[0]] = item[1]
            virtual_installed_names_list.append(item[0])
        virtual_removed_names_list = []
        virtual_removed_versions_dict = {}
        for item in virtual['remove']:
            virtual_removed_versions_dict[item[0]] = item[1]
            virtual_removed_names_list.append(item[0])
        try:
            no_real_installs = virtual['no_real_installs']
        except:
            no_real_installs = False
        try:
            all_real_is_installed = virtual['all_real_is_installed']
        except:
            all_real_is_installed = False
    else:
        virtual_installed_names_list = []
        virtual_installed_versions_dict = {}
        virtual_removed_names_list = []
        virtual_removed_versions_dict = {}
        no_real_installs = False
        all_real_is_installed = False

    # parse each query part
    i = 0
    while i < len(orig_parts):
        j = 0
        while j < len(orig_parts[i]):
            orig_parts[i][j] = orig_parts[i][j].strip()
            spliter = None
            if '>=' in orig_parts[i][j]:
                spliter = '>='
            elif '<=' in orig_parts[i][j]:
                spliter = '<='
            elif '>' in orig_parts[i][j]:
                spliter = '>'
            elif '<' in orig_parts[i][j]:
                spliter = '<'
            elif '=' in orig_parts[i][j]:
                spliter = '='
            if spliter != None:
                orig_parts[i][j] = orig_parts[i][j].split(spliter)
                orig_parts[i][j].insert(1, spliter)
            else:
                orig_parts[i][j] = [orig_parts[i][j]]
            z = 0
            while z < len(orig_parts[i][j]):
                orig_parts[i][j][z] = orig_parts[i][j][z].strip()
                z += 1
            j += 1
        i += 1

    # check query
    for tmp in orig_parts:
        ands_ok = True
        for p in tmp:
            if p[0][0] == '@':
                # check file query
                parts = p[0].split('@')
                if len(parts) == 2:
                    if not os.path.exists(Env.base_path(parts[-1])):
                        ands_ok = False
                elif len(parts) == 3:
                    if not os.path.exists(Env.base_path(parts[-1])):
                        ands_ok = False
                    else:
                        if os.path.isfile(Env.base_path(parts[-1])):
                            sha256_sum = calc_file_sha256(Env.base_path(parts[-1]))
                            if parts[1] != sha256_sum:
                                ands_ok = False
            elif not p[0] in virtual_installed_names_list and no_real_installs:
                ands_ok = False
                if get_false_pkg and get_false_pkg_next <= 0:
                    return p
                else:
                    get_false_pkg_next -= 1
            elif not p[0] in virtual_removed_names_list and all_real_is_installed:
                pass
            elif len(p) == 1:
                if (not Pkg.is_installed(p[0]) and not p[0] in virtual_installed_names_list) \
                        or p[0] in virtual_removed_names_list:
                    ands_ok = False
                    if get_false_pkg and get_false_pkg_next <= 0:
                        return p
                    else:
                        get_false_pkg_next -= 1
            elif len(p) == 3:
                if (not Pkg.is_installed(p[0]) and not p[0] in virtual_installed_names_list) \
                        or p[0] in virtual_removed_names_list:
                    ands_ok = False
                    if get_false_pkg and get_false_pkg_next <= 0:
                        return p
                    else:
                        get_false_pkg_next -= 1
                else:
                    if p[0] in virtual_installed_names_list:
                        a_ver = virtual_installed_versions_dict[p[0]]
                    else:
                        a_ver = Pkg.installed_version(p[0])
                    b_ver = p[2]
                    if p[1] == '=':
                        if Pkg.compare_version(a_ver, b_ver) != 0:
                            ands_ok = False
                            if get_false_pkg and get_false_pkg_next <= 0:
                                return p
                            else:
                                get_false_pkg_next -= 1
                    elif p[1] == '>':
                        if Pkg.compare_version(a_ver, b_ver) != 1:
                            ands_ok = False
                            if get_false_pkg and get_false_pkg_next <= 0:
                                return p
                            else:
                                get_false_pkg_next -= 1
                    elif p[1] == '<':
                        if Pkg.compare_version(a_ver, b_ver) != -1:
                            ands_ok = False
                            if get_false_pkg and get_false_pkg_next <= 0:
                                return p
                            else:
                                get_false_pkg_next -= 1
                    elif p[1] == '<=':
                        if Pkg.compare_version(a_ver, b_ver) != -1 \
                                and Pkg.compare_version(a_ver, b_ver) != 0:
                            ands_ok = False
                            if get_false_pkg and get_false_pkg_next <= 0:
                                return p
                            else:
                                get_false_pkg_next -= 1
                    elif p[1] == '>=':
                        if Pkg.compare_version(a_ver, b_ver) != 1 \
                                and Pkg.compare_version(a_ver, b_ver) != 0:
                            ands_ok = False
                            if get_false_pkg and get_false_pkg_next <= 0:
                                return p
                            else:
                                get_false_pkg_next -= 1
            else:
                ands_ok = False
                if get_false_pkg and get_false_pkg_next <= 0:
                    return p
                else:
                    get_false_pkg_next -= 1
            if ands_ok and get_true_pkg and get_true_pkg_next <= 0:
                return p
            else:
                get_true_pkg_next -= 1
        if ands_ok:
            return True
    return False
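# A minimal usage sketch (not part of the original source); the package names,
# versions and file path are hypothetical, based on the docstring examples:
ok = check_state('testpkga >= 3.0 | @/usr/bin/somefile')

# treat `testpkgz` as installed (version 3.7.11) even if it is not really installed:
ok = check_state(
    'testpkgz >= 1.0',
    virtual={'install': [['testpkgz', '3.7.11']], 'remove': []},
)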