def package():
    params = {'arch': 'x86', 'osname': 'win', 'package': 'package'}
    params_found = {'repo': 'lib-cpp-release', 'version': '1.2.3'}
    _package = Package('package', None, params, None, None, None, params_found, None, None)
    _package.unpacked_path = "/test/path"
    yield _package
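# --- Illustrative sketch (added; not part of the original sources) -----------
# A minimal example of how the `package` fixture above could be consumed in a
# pytest test. It only relies on attributes the fixture sets itself
# (unpacked_path) or that the adapters below read (`x.name`); treating the
# first positional constructor argument as the package name is an inference.
def test_package_fixture(package):
    assert package.unpacked_path == "/test/path"
    # get_packages() iterates `x.name for x in _packages_found.values()`,
    # so a Package instance is expected to expose .name.
    assert package.name == "package"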
def __init__(self, config, do_load):
    self._log = logging.getLogger('crosspm')
    self._config = config  # type: Config
    self.cache = config.cache
    self.solid = config.solid
    self.common_parser = Parser('common', {}, config)
    self._root_package = Package('<root>', 0, {self._config.name_column: '<root>'}, self, None,
                                 self.common_parser)

    if not config.deps_path:
        config.deps_path = \
            config.deps_file_name if config.deps_file_name else CROSSPM_DEPENDENCY_FILENAME
    deps_path = config.deps_path
    if deps_path.__class__ is DependenciesContent:
        # HACK: deps content was passed directly, keep it as-is
        self._deps_path = deps_path
    else:
        deps_path = config.deps_path.strip().strip('"').strip("'")
        self._deps_path = os.path.realpath(os.path.expanduser(deps_path))

    if not config.depslock_path:
        config.depslock_path = \
            config.deps_lock_file_name if config.deps_lock_file_name else CROSSPM_DEPENDENCY_LOCK_FILENAME
    depslock_path = config.depslock_path
    if depslock_path.__class__ is DependenciesContent:
        # HACK: lock content was passed directly, keep it as-is
        self._depslock_path = depslock_path
    else:
        depslock_path = depslock_path.strip().strip('"').strip("'")
        self._depslock_path = os.path.realpath(os.path.expanduser(depslock_path))

    self.do_load = do_load
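# --- Illustrative sketch (added; not part of the original sources) -----------
# The constructor above normalizes deps/depslock paths by stripping quotes and
# expanding "~" before resolving to an absolute path. A standalone, runnable
# version of that normalization (the helper name is ours, not crosspm's):
import os

def normalize_deps_path(raw_path):
    cleaned = raw_path.strip().strip('"').strip("'")
    return os.path.realpath(os.path.expanduser(cleaned))

print(normalize_deps_path('  "~/project/dependencies.txt.lock"  '))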
def package_root():
    """
    Create root with dependencies:
    root
    - package1
      - package11
      - package12
    - package2
    """
    params = {'arch': 'x86', 'osname': 'win', 'package': 'root'}
    _root = Package('root', None, params, None, None, None, None, None, None)

    params = {'arch': 'x86', 'osname': 'win', 'package': 'package1'}
    _package1 = Package('package1', None, params, None, None, None, None, None, None)

    params = {'arch': 'x86', 'osname': 'win', 'package': 'package11'}
    _package11 = Package('package11', None, params, None, None, None, None, None, None)

    params = {'arch': 'x86', 'osname': 'win', 'package': 'package12'}
    _package12 = Package('package12', None, params, None, None, None, None, None, None)

    params = {'arch': 'x86', 'osname': 'win', 'package': 'package2'}
    _package2 = Package('package2', None, params, None, None, None, None, None, None)

    _package1.packages = OrderedDict([('package11', _package11), ('package12', _package12)])
    _root.packages = OrderedDict([('package2', _package2), ('package1', _package1)])

    for _package in _root.all_packages:
        _package.unpacked_path = "/test/path/{}".format(_package.name)
    yield _root
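# --- Illustrative sketch (added; not part of the original sources) -----------
# The `package_root` fixture builds a two-level tree and then flattens it via
# root.all_packages. This standalone stand-in mirrors only the .name/.packages
# layout of the fixture so it runs without crosspm; the depth-first order is
# an assumption about what all_packages yields, not a documented guarantee.
from collections import OrderedDict

class _Node:
    def __init__(self, name):
        self.name = name
        self.packages = OrderedDict()

def _walk(node):
    # depth-first flattening, analogous to iterating root.all_packages
    for child in node.packages.values():
        yield child
        yield from _walk(child)

root = _Node('root')
package1, package11, package12, package2 = (_Node(n) for n in ('package1', 'package11', 'package12', 'package2'))
package1.packages = OrderedDict([('package11', package11), ('package12', package12)])
root.packages = OrderedDict([('package2', package2), ('package1', package1)])
print([n.name for n in _walk(root)])  # ['package2', 'package1', 'package11', 'package12']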
def __init__(self, config, depslock_path='', do_load=True):
    self._log = logging.getLogger(__name__)
    self._config = config
    self._root_package = Package('<root>', 0, {self._config.name_column: '<root>'}, self, None,
                                 config.get_parser('common'))

    self._cache_path = config.crosspm_cache_root
    if not os.path.exists(self._cache_path):
        os.makedirs(self._cache_path)

    self.packed_path = os.path.realpath(os.path.join(self._cache_path, 'archive'))
    self.unpacked_path = os.path.realpath(os.path.join(self._cache_path, 'cache'))
    self.temp_path = os.path.realpath(os.path.join(self._cache_path, 'tmp'))

    if not depslock_path:
        depslock_path = config.deps_lock_file_name if config.deps_lock_file_name else CROSSPM_DEPENDENCY_LOCK_FILENAME
    self._depslock_path = os.path.realpath(depslock_path)

    self.do_load = do_load
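# --- Illustrative sketch (added; not part of the original sources) -----------
# The constructor above derives three working directories from
# config.crosspm_cache_root: 'archive' for packed downloads, 'cache' for
# unpacked packages and 'tmp' for temporary files. A standalone sketch of that
# layout; the cache root value here is a placeholder, the real one comes from
# the config.
import os

cache_root = os.path.expanduser("~/.crosspm_cache")  # placeholder value
layout = {
    "packed_path": os.path.realpath(os.path.join(cache_root, "archive")),
    "unpacked_path": os.path.realpath(os.path.join(cache_root, "cache")),
    "temp_path": os.path.realpath(os.path.join(cache_root, "tmp")),
}
print(layout)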
def __init__(self, config, do_load, recursive, parser_class=Parser):
    self._log = logging.getLogger('crosspm')
    self._config = config  # type: Config
    self._root_package = Package('<root>', 0, None, {self._config.name_column: '<root>'}, self, None, None)
    self.recursive = recursive
    self.do_load = do_load

    deps_path = config.deps_path.strip().strip('"').strip("'")
    self._deps_path = os.path.realpath(os.path.expanduser(deps_path))

    if not config.depslock_path:
        config.depslock_path = \
            config.deps_lock_file_name if config.deps_lock_file_name else CROSSPM_DEPENDENCY_LOCK_FILENAME
    depslock_path = config.depslock_path.strip().strip('"').strip("'")
    self._depslock_path = os.path.realpath(os.path.expanduser(depslock_path))
class Downloader(Command): def __init__(self, config, do_load, recursive, parser_class=Parser): self._log = logging.getLogger('crosspm') self._config = config # type: Config self._root_package = Package('<root>', 0, None, {self._config.name_column: '<root>'}, self, None, None) self.recursive = recursive self.do_load = do_load deps_path = config.deps_path.strip().strip('"').strip("'") self._deps_path = os.path.realpath(os.path.expanduser(deps_path)) if not config.depslock_path: config.depslock_path = \ config.deps_lock_file_name if config.deps_lock_file_name else CROSSPM_DEPENDENCY_LOCK_FILENAME depslock_path = config.depslock_path.strip().strip('"').strip("'") self._depslock_path = os.path.realpath( os.path.expanduser(depslock_path)) def update_progress(self, msg, progress): self._log.info('\r{0} [{1:10}] {2}%'.format( msg, '#' * int(float(progress) / 10.0), int(progress))) # Get list of all packages needed to resolve all the dependencies. # List of Package class instances. def get_dependency_packages(self, package_matches, property_validate=True): _packages = OrderedDict() for i, _src in enumerate(self._config.sources()): self._log.info('') self._log.info(f'source: {_src}') _found_packages = _src.get_packages(self, package_matches, property_validate) _packages.update( OrderedDict([(k, v) for k, v in _found_packages.items() if _packages.get(k, None) is None])) return _packages def get_usedby_packages(self, list_or_file_path=None, property_validate=True): """ :param list_or_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, we can skip validate part :return: """ if list_or_file_path is None: list_or_file_path = self._depslock_path if not os.path.isfile(list_or_file_path): list_or_file_path = self._deps_path _packages = OrderedDict() if isinstance(list_or_file_path, str): self._log.info('Reading dependencies ... 
[%s]', list_or_file_path) for i, _src in enumerate(self._config.sources()): if i > 0: self._log.info('') self._log.info('Next source ...') _found_packages = _src.get_usedby(self, list_or_file_path, property_validate) _packages.update( OrderedDict([(k, v) for k, v in _found_packages.items() if _packages.get(k, None) is None])) if not self._config.no_fails: if isinstance(list_or_file_path, (list, tuple)): list_or_file_path = [ x for x in list_or_file_path if _packages.get( x[self._config.name_column], None) is None ] elif isinstance(list_or_file_path, dict) and isinstance( list_or_file_path.get('raw', None), list): list_or_file_path['raw'] = [ x for x in list_or_file_path['raw'] if _packages.get( x[self._config.name_column], None) is None ] return _packages def find_package_in_artifactory(self, package): return # Download packages or just unpack already loaded (it's up to adapter to decide) def download_packages(self): with open(self._config.depslock_path) as f: packages_path_in_repo = DepsTxtLockListFormatter.read_packages_from_lock_file( f) session = requests.Session() packages_not_found = [] output_path = self._config.output_path if output_path is not None and output_path != '': pathlib.Path(output_path).mkdir(parents=True, exist_ok=True) for package_path in packages_path_in_repo: errors = {} package_download_done = False for src in self._config.sources(): if package_download_done: break session.auth = src.get_auth_params().auth packages = src.generate_full_urls_from_package_path_in_repo( package_path) # packages = self.find_package_in_artifactory(src, package) for p in packages: try: ap = ArtifactoryPath(p, session=session) dst_path = os.path.join(output_path, ap.name) with ap.open() as input, open(dst_path, "wb") as output: shutil.copyfileobj(input, output) package_download_done = True self._log.info( f"Success {ap} downloaded to {dst_path}") break except RuntimeError as e: if isinstance(e.args[0], int): errors[ap] = http.HTTPStatus(e.args[0]) else: errors[ap] = e if not package_download_done: packages_not_found += [package_path] self._log.error( f"PACKAGE NOT FOUND {package_path}. Errors for attempts {errors}" ) if packages_not_found: raise CrosspmException( CROSSPM_ERRORCODE_PACKAGE_NOT_FOUND, 'Some package(s) not found: {}'.format( ', '.join(packages_not_found))) def entrypoint(self, *args, **kwargs): self.download_packages(*args, **kwargs) def search_dependencies(self, depslock_file_path, deps_content=None): self._log.info('Check dependencies ...') self._root_package.find_dependencies( depslock_file_path, property_validate=True, deps_content=deps_content, ) self._log.info('') self.check_not_found() def check_not_found(self): _not_found = self.get_not_found_packages() if _not_found: raise CrosspmException( CROSSPM_ERRORCODE_PACKAGE_NOT_FOUND, 'Some package(s) not found: {}'.format(', '.join(_not_found))) def get_not_found_packages(self): return self._root_package.get_none_packages() def add_package(self, pkg_name, package): _added = False if package is not None: _added = True return _added, package def get_raw_packages(self): """ Get all packages :return: list of all packages """ return self._root_package.all_packages def get_tree_packages(self): """ Get all packages, with hierarchy :return: list of first level packages, with child """ return self._root_package.packages def iter_packages_params(self, list_or_file_path, deps_content=None): return self.common_parser.iter_packages_params(list_or_file_path, deps_content)
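# --- Illustrative sketch (added; not part of the original sources) -----------
# download_packages() above streams each resolved artifact to the output
# directory via ArtifactoryPath.open() and shutil.copyfileobj(). A reduced,
# self-contained version of that single download step; the URL, credentials
# and output directory are placeholders supplied by the caller.
import os
import shutil

import requests
from artifactory import ArtifactoryPath  # dohq-artifactory, as used above

def fetch_artifact(url, output_dir, username, password):
    session = requests.Session()
    session.auth = (username, password)
    ap = ArtifactoryPath(url, session=session)
    dst_path = os.path.join(output_dir, ap.name)
    with ap.open() as src, open(dst_path, "wb") as dst:
        shutil.copyfileobj(src, dst)
    return dst_path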
def get_usedby(self, source, parser, downloader, list_or_file_path, property_validate=True): """ :param source: :param parser: :param downloader: :param list_or_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, we can skip validate part :return: """ _auth_type = source.args['auth_type'].lower( ) if 'auth_type' in source.args else 'simple' _art_auth_etc = {} if 'auth' in source.args: self.search_auth(list_or_file_path, source) if _auth_type == 'simple': _art_auth_etc['auth'] = HTTPBasicAuth( *tuple(source.args['auth'])) session.auth = _art_auth_etc['auth'] # elif _auth_type == 'cert': # _art_auth_etc['cert'] = os.path.realpath(os.path.expanduser(source.args['auth'])) if 'auth' not in _art_auth_etc: msg = 'You have to set auth parameter for sources with artifactory-aql adapter' # self._log.error(msg) raise CrosspmException(CROSSPM_ERRORCODE_ADAPTER_ERROR, msg) if 'verify' in source.args: _art_auth_etc['verify'] = source.args['verify'].lower in [ 'true', 'yes', '1' ] else: _art_auth_etc['verify'] = False _secret_variables = self._config.secret_variables _pkg_name_col = self._config.name_column _packages_found = OrderedDict() _pkg_name_old = "" for _paths in parser.get_paths(list_or_file_path, source): _packages = [] _params_found = {} _params_found_raw = {} last_error = '' _pkg_name = _paths['params'][_pkg_name_col] if _pkg_name != _pkg_name_old: _pkg_name_old = _pkg_name self._log.info('{}: {}'.format( _pkg_name, { k: v for k, v in _paths['params'].items() if (k not in (_pkg_name_col, 'repo') and k not in _secret_variables) })) for _sub_paths in _paths['paths']: _tmp_params = dict(_paths['params']) self._log.info('repo: {}'.format(_sub_paths['repo'])) _tmp_params['repo'] = _sub_paths['repo'] try: _artifactory_server = _tmp_params['server'] _search_repo = _tmp_params['repo'] # TODO: Попробовать использовать lru_cache для кеширования кучи запросов _aql_query_url = '{}/api/search/aql'.format( _artifactory_server) _aql_query_dict = { "repo": { "$eq": _search_repo, }, } _usedby_aql = parser.get_usedby_aql(_tmp_params) if _usedby_aql is None: continue _aql_query_dict.update(_usedby_aql) query = 'items.find({query_dict}).include("*", "property")'.format( query_dict=json.dumps(_aql_query_dict)) session.auth = _art_auth_etc['auth'] r = session.post(_aql_query_url, data=query, verify=_art_auth_etc['verify']) r.raise_for_status() _found_paths = r.json() for _found in _found_paths['results']: _repo_path = "{artifactory}/{repo}/{path}/{file_name}".format( artifactory=_artifactory_server, repo=_found['repo'], path=_found['path'], file_name=_found['name']) _repo_path = ArtifactoryPath(_repo_path, **_art_auth_etc) _found_properties = { x['key']: x.get('value', '') for x in _found['properties'] } _matched, _params, _params_raw = parser.validate_path( str(_repo_path), _tmp_params) _params_found[_repo_path] = { k: v for k, v in _params.items() } _params_found_raw[_repo_path] = { k: v for k, v in _params_raw.items() } _params = _tmp_params _packages += [_repo_path] _params_found[_repo_path].update( {k: v for k, v in _params.items()}) _params_found[_repo_path]['filename'] = str( _repo_path.name) _params_raw = _params_found_raw.get(_repo_path, {}) params_found = {} # TODO: Проставление params брать из config.yaml usedby params = parser.get_params_from_properties( _found_properties) params.update( parser.get_params_from_path(str(_repo_path))) _package = Package(params[_pkg_name_col], _repo_path, params, downloader, self, parser, params_found, 
_params_raw) _package.find_usedby(None, property_validate=False) _packages_found[str(_repo_path)] = _package # _package.find_dependencies(_deps_file, property_validate=False) _mark = 'chosen' self._log.info(' {}: {}'.format( _mark, str(_repo_path))) except RuntimeError as e: try: err = json.loads(e.args[0]) except: err = {} if isinstance(err, dict): # Check errors # :e.args[0]: { # "errors" : [ { # "status" : 404, # "message" : "Not Found" # } ] # } for error in err.get('errors', []): err_status = error.get('status', -1) err_msg = error.get('message', '') if err_status == 401: msg = 'Authentication error[{}]{}'.format( err_status, (': {}'.format(err_msg)) if err_msg else '') elif err_status == 404: msg = last_error else: msg = 'Error[{}]{}'.format( err_status, (': {}'.format(err_msg)) if err_msg else '') if last_error != msg: self._log.error(msg) last_error = msg return _packages_found
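# --- Illustrative sketch (added; not part of the original sources) -----------
# get_usedby() above builds an AQL request by merging a repo filter with the
# parser's usedby clause, serialising it with json.dumps and POSTing it to
# '<server>/api/search/aql'. A standalone sketch of the query assembly; the
# repo name and the property clause below are placeholders, the real clauses
# come from parser.get_usedby_aql().
import json

def build_aql_query(repo, extra_clauses):
    # repo filter plus extra clause(s), mirroring the dict assembled above
    query_dict = {"repo": {"$eq": repo}}
    query_dict.update(extra_clauses)
    return 'items.find({}).include("*", "property")'.format(json.dumps(query_dict))

print(build_aql_query("lib-cpp-release", {"@dd.usedby.name": {"$eq": "mylib"}}))
# The resulting string is POSTed to '<server>/api/search/aql' with session.post(),
# as get_usedby() does above.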
def get_packages(self, source, parser, downloader, list_or_file_path, property_validate=True): """ :param source: :param parser: :param downloader: :param list_or_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, we can skip validate part :return: """ _auth_type = source.args['auth_type'].lower( ) if 'auth_type' in source.args else 'simple' _art_auth_etc = {} if 'auth' in source.args: self.search_auth(list_or_file_path, source) if _auth_type == 'simple': _art_auth_etc['auth'] = HTTPBasicAuth( *tuple(source.args['auth'])) session.auth = _art_auth_etc['auth'] # elif _auth_type == 'cert': # _art_auth_etc['cert'] = os.path.realpath(os.path.expanduser(source.args['auth'])) if 'auth' not in _art_auth_etc: msg = 'You have to set auth parameter for sources with artifactory-aql adapter' # self._log.error(msg) raise CrosspmException(CROSSPM_ERRORCODE_ADAPTER_ERROR, msg) if 'verify' in source.args: _art_auth_etc['verify'] = source.args['verify'].lower in [ 'true', 'yes', '1' ] else: _art_auth_etc['verify'] = False _pkg_name_column = self._config.name_column _secret_variables = self._config.secret_variables _packages_found = OrderedDict() _pkg_name_old = "" _packed_exist = False _packed_cache_params = None self._log.info('parser: {}'.format(parser._name)) for _paths in parser.get_paths(list_or_file_path, source): _packages = [] _params_found = {} _params_found_raw = {} last_error = '' _pkg_name = _paths['params'][_pkg_name_column] if _pkg_name != _pkg_name_old: _pkg_name_old = _pkg_name self._log.info('{}: {}'.format( _pkg_name, { k: v for k, v in _paths['params'].items() if (k not in (_pkg_name_column, 'repo') and k not in _secret_variables) })) for _sub_paths in _paths['paths']: _tmp_params = dict(_paths['params']) self._log.info('repo: {}'.format(_sub_paths['repo'])) for _path in _sub_paths['paths']: _tmp_params['repo'] = _sub_paths['repo'] # ------ START ---- # HACK for prefer-local if self._config.prefer_local and not parser.has_rule( 'properties'): params = parser.get_params_with_extra( 'path', _paths['params']) for param in params: param['repo'] = _tmp_params['repo'] _path_packed = downloader.cache.path_packed( None, param) _packed_exist = os.path.isfile(_path_packed) if _packed_exist: self._log.info( "Skip searching, use package cache in path {}" .format(_path_packed)) _packed_cache_params = param break # break check local cache if _packed_exist: break # break connect to artifactory # ------ END ---- _path_fixed, _path_pattern, _file_name_pattern = parser.split_fixed_pattern_with_file_name( _path) try: _artifactory_server = _tmp_params['server'] _search_repo = _tmp_params['repo'] # Get AQL path pattern, with fixed part path, without artifactory url and repository name _aql_path_pattern = _path_fixed[len(_artifactory_server ) + 1 + len(_search_repo) + 1:] if _path_pattern: _aql_path_pattern = _aql_path_pattern + "/" + _path_pattern _aql_query_url = '{}/api/search/aql'.format( _artifactory_server) _aql_query_dict = { "repo": { "$eq": _search_repo, }, "path": { "$match": _aql_path_pattern, }, "name": { "$match": _file_name_pattern, }, } # Remove path if is empty string if not _aql_path_pattern: _aql_query_dict.pop('path') query = 'items.find({query_dict}).include("*", "property")'.format( query_dict=json.dumps(_aql_query_dict)) session.auth = _art_auth_etc['auth'] r = session.post(_aql_query_url, data=query, verify=_art_auth_etc['verify']) r.raise_for_status() _found_paths = r.json() for _found in _found_paths['results']: 
_repo_path = "{artifactory}/{repo}/{path}/{file_name}".format( artifactory=_artifactory_server, repo=_found['repo'], path=_found['path'], file_name=_found['name']) _repo_path = ArtifactoryPath( _repo_path, **_art_auth_etc) _mark = 'found' _matched, _params, _params_raw = parser.validate_path( str(_repo_path), _tmp_params) if _matched: _params_found[_repo_path] = { k: v for k, v in _params.items() } _params_found_raw[_repo_path] = { k: v for k, v in _params_raw.items() } _mark = 'match' # Check if it's `root` packages or from `lock` file # ALSO, if from `lock` and have * in name - validate with property property_validate_tmp = property_validate or '*' in _file_name_pattern # If have not rule in config, skip this part if parser.has_rule('properties' ) and property_validate_tmp: _found_properties = { x['key']: x.get('value', '') for x in _found['properties'] } _valid, _params = parser.validate( _found_properties, 'properties', _tmp_params, return_params=True) else: _valid, _params = True, {} if _valid: _mark = 'valid' _packages += [_repo_path] _params_found[_repo_path].update( {k: v for k, v in _params.items()}) _params_found[_repo_path][ 'filename'] = str(_repo_path.name) self._log.debug(' {}: {}'.format( _mark, str(_repo_path))) except RuntimeError as e: try: err = json.loads(e.args[0]) except: err = {} if isinstance(err, dict): # Check errors # :e.args[0]: { # "errors" : [ { # "status" : 404, # "message" : "Not Found" # } ] # } for error in err.get('errors', []): err_status = error.get('status', -1) err_msg = error.get('message', '') if err_status == 401: msg = 'Authentication error[{}]{}'.format( err_status, (': {}'.format(err_msg)) if err_msg else '') elif err_status == 404: msg = last_error else: msg = 'Error[{}]{}'.format( err_status, (': {}'.format(err_msg)) if err_msg else '') if last_error != msg: self._log.error(msg) last_error = msg _package = None # HACK for prefer-local if _packed_exist: # HACK - Normalize params for cached archive for key, value in _packed_cache_params.items(): if isinstance(value, list): value = ['' if x is None else x for x in value] _packed_cache_params[key] = value _package = Package(_pkg_name, None, _paths['params'], downloader, self, parser, _packed_cache_params, list_or_file_path['raw'], {}, in_cache=True) # END HACK if _packages: _tmp = copy.deepcopy(_params_found) _packages = parser.filter_one(_packages, _paths['params'], _tmp) if isinstance(_packages, dict): _packages = [_packages] if len(_packages) == 1: _stat_pkg = self.pkg_stat(_packages[0]['path']) _params_raw = _params_found_raw.get( _packages[0]['path'], {}) _params_tmp = _params_found.get(_packages[0]['path'], {}) _params_tmp.update({ k: v for k, v in _packages[0]['params'].items() if k not in _params_tmp }) _package = Package(_pkg_name, _packages[0]['path'], _paths['params'], downloader, self, parser, _params_tmp, _params_raw, _stat_pkg) _mark = 'chosen' self._log.info(' {}: {}'.format( _mark, str(_packages[0]['path']))) elif len(_packages) > 1: raise CrosspmException( CROSSPM_ERRORCODE_MULTIPLE_DEPS, 'Multiple instances found for package [{}] not found.'. format(_pkg_name)) else: # Package not found: may be error, but it could be in other source. pass else: # Package not found: may be error, but it could be in other source. 
pass if (_package is not None) or (not self._config.no_fails): _added, _package = downloader.add_package(_pkg_name, _package) else: _added = False if _package is not None: _pkg_name = _package.name if _added or (_package is not None): if (_package is not None) or (not self._config.no_fails): if (_package is not None) or (_packages_found.get( _pkg_name, None) is None): _packages_found[_pkg_name] = _package if _added and (_package is not None): if downloader.do_load: _package.download() _deps_file = _package.get_file( self._config.deps_lock_file_name) if _deps_file: _package.find_dependencies(_deps_file, property_validate=False) elif self._config.deps_file_name: _deps_file = _package.get_file( self._config.deps_file_name) if _deps_file and os.path.isfile(_deps_file): _package.find_dependencies(_deps_file, property_validate=False) # HACK for not found packages _package_names = [ x[self._config.name_column] for x in list_or_file_path['raw'] ] _packages_found_names = [x.name for x in _packages_found.values()] for package in _package_names: if package not in _packages_found_names: _packages_found[package] = None return _packages_found
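# --- Illustrative sketch (added; not part of the original sources) -----------
# get_packages() above searches Artifactory by repository, path pattern and
# file-name pattern, and drops the "path" clause when the path pattern is
# empty. A standalone sketch of that query assembly with placeholder values:
import json

def build_search_query(repo, path_pattern, file_name_pattern):
    query_dict = {
        "repo": {"$eq": repo},
        "path": {"$match": path_pattern},
        "name": {"$match": file_name_pattern},
    }
    if not path_pattern:
        query_dict.pop("path")
    return 'items.find({}).include("*", "property")'.format(json.dumps(query_dict))

print(build_search_query("lib-cpp-release", "mylib/1.2.*", "mylib.*.tar.gz"))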
def get_packages(self, source, parser, downloader, list_or_file_path): _pkg_name_col = self._config.name_column _packages_found = {} _pkg_name_old = "" for _paths in parser.get_paths(list_or_file_path, source): _packages = [] _params_found = {} _params_found_raw = {} last_error = '' _pkg_name = _paths['params'][_pkg_name_col] if _pkg_name != _pkg_name_old: _pkg_name_old = _pkg_name self._log.info('{}: {}'.format( _pkg_name, { k: v for k, v in _paths['params'].items() if k not in (_pkg_name_col, 'repo') })) for _sub_paths in _paths['paths']: self._log.info('repo: {}'.format(_sub_paths['repo'])) for _path in _sub_paths['paths']: _tmp_params = dict(_paths['params']) _tmp_params['repo'] = _sub_paths['repo'] _path_fixed, _path_pattern = parser.split_fixed_pattern( _path) _repo_paths = FilesPath(_path_fixed) try: for _repo_path in _repo_paths.glob(_path_pattern): _mark = 'found' _matched, _params, _params_raw = parser.validate_path( str(_repo_path), _tmp_params) if _matched: _params_found[_repo_path] = { k: v for k, v in _params.items() } _params_found_raw[_repo_path] = { k: v for k, v in _params_raw.items() } _mark = 'match' _valid, _params = parser.validate( _repo_path.properties, 'properties', _tmp_params, return_params=True) if _valid: _mark = 'valid' _packages += [_repo_path] _params_found[_repo_path].update( {k: v for k, v in _params.items()}) _params_found[_repo_path][ 'filename'] = str(_repo_path.name) self._log.debug(' {}: {}'.format( _mark, str(_repo_path))) except RuntimeError as e: try: err = json.loads(e.args[0]) except: err = {} if isinstance(err, dict): # TODO: Check errors # e.args[0] = '''{ # "errors" : [ { # "status" : 404, # "message" : "Not Found" # } ] # }''' for error in err.get('errors', []): err_status = error.get('status', -1) err_msg = error.get('message', '') if err_status == 401: msg = 'Authentication error[{}]{}'.format( err_status, (': {}'.format(err_msg)) if err_msg else '') elif err_status == 404: msg = last_error else: msg = 'Error[{}]{}'.format( err_status, (': {}'.format(err_msg)) if err_msg else '') if last_error != msg: self._log.error(msg) last_error = msg _package = None if _packages: _packages = parser.filter_one(_packages, _paths['params'], _params_found) if isinstance(_packages, dict): _packages = [_packages] if len(_packages) == 1: _stat_pkg = self.pkg_stat(_packages[0]['path']) _params_raw = _params_found_raw.get( _packages[0]['path'], {}) _params_tmp = _params_found.get(_packages[0]['path'], {}) _params_tmp.update({ k: v for k, v in _packages[0]['params'].items() if k not in _params_tmp }) _package = Package(_pkg_name, _packages[0]['path'], _paths['params'], downloader, self, parser, _params_tmp, _params_raw, _stat_pkg) _mark = 'chosen' self._log.info(' {}: {}'.format( _mark, str(_packages[0]['path']))) elif len(_packages) > 1: raise CrosspmException( CROSSPM_ERRORCODE_MULTIPLE_DEPS, 'Multiple instances found for package [{}] not found.'. format(_pkg_name)) else: # Package not found: may be error, but it could be in other source. pass else: # Package not found: may be error, but it could be in other source. 
pass if (_package is not None) or (not self._config.no_fails): _added, _package = downloader.add_package(_pkg_name, _package) else: _added = False if _package is not None: _pkg_name = _package.name if _added or (_package is not None): if (_package is not None) or (not self._config.no_fails): if (_package is not None) or (_packages_found.get( _pkg_name, None) is None): _packages_found[_pkg_name] = _package if _added and (_package is not None): if downloader.do_load: _package.download() _deps_file = _package.get_file( self._config.deps_lock_file_name) if _deps_file: _package.find_dependencies(_deps_file) elif self._config.deps_file_name: _deps_file = _package.get_file( self._config.deps_file_name) if _deps_file and os.path.isfile(_deps_file): _package.find_dependencies(_deps_file) return _packages_found
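# --- Illustrative sketch (added; not part of the original sources) -----------
# The files adapter above splits each templated path into a fixed prefix and a
# glob pattern, then enumerates candidates with .glob(). The split rule below
# (fixed part = everything before the first path segment containing a
# wildcard) is only our reading of split_fixed_pattern, not crosspm's actual
# implementation.
import pathlib

def split_fixed_pattern(path):
    parts = pathlib.PurePosixPath(path).parts
    for i, part in enumerate(parts):
        if "*" in part or "?" in part:
            return str(pathlib.PurePosixPath(*parts[:i])), str(pathlib.PurePosixPath(*parts[i:]))
    return path, ""

fixed, pattern = split_fixed_pattern("/repo/packages/mylib/*/mylib.*.tar.gz")
print(fixed, pattern)  # /repo/packages/mylib */mylib.*.tar.gz
# for candidate in pathlib.Path(fixed).glob(pattern):
#     ...  # validate path and properties, as get_packages() does above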
class Downloader(Command): def __init__(self, config, do_load): self._log = logging.getLogger('crosspm') self._config = config # type: Config self.cache = config.cache self.solid = config.solid self.common_parser = Parser('common', {}, config) self._root_package = Package('<root>', 0, {self._config.name_column: '<root>'}, self, None, self.common_parser) if not config.deps_path: config.deps_path = \ config.deps_file_name if config.deps_file_name else CROSSPM_DEPENDENCY_FILENAME deps_path = config.deps_path if deps_path.__class__ is DependenciesContent: # HACK pass self._deps_path = deps_path else: deps_path = config.deps_path.strip().strip('"').strip("'") self._deps_path = os.path.realpath(os.path.expanduser(deps_path)) if not config.depslock_path: config.depslock_path = \ config.deps_lock_file_name if config.deps_lock_file_name else CROSSPM_DEPENDENCY_LOCK_FILENAME depslock_path = config.depslock_path if depslock_path.__class__ is DependenciesContent: # HACK self._depslock_path = depslock_path else: depslock_path = depslock_path.strip().strip('"').strip("'") self._depslock_path = os.path.realpath( os.path.expanduser(depslock_path)) self.do_load = do_load # Get list of all packages needed to resolve all the dependencies. # List of Package class instances. def get_dependency_packages(self, list_or_file_path=None, property_validate=True): """ :param list_or_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, we can skip validate part :return: """ if list_or_file_path is None: list_or_file_path = self._depslock_path if not os.path.isfile(list_or_file_path): list_or_file_path = self._deps_path _packages = OrderedDict() if isinstance(list_or_file_path, str): self._log.info('Reading dependencies ... [%s]', list_or_file_path) for i, _src in enumerate(self._config.sources()): if i > 0: self._log.info('') self._log.info('Next source ...') _found_packages = _src.get_packages(self, list_or_file_path, property_validate) _packages.update( OrderedDict([(k, v) for k, v in _found_packages.items() if _packages.get(k, None) is None])) if not self._config.no_fails: if isinstance(list_or_file_path, (list, tuple)): list_or_file_path = [ x for x in list_or_file_path if _packages.get( x[self._config.name_column], None) is None ] elif isinstance(list_or_file_path, dict) and isinstance( list_or_file_path.get('raw', None), list): list_or_file_path['raw'] = [ x for x in list_or_file_path['raw'] if _packages.get( x[self._config.name_column], None) is None ] return _packages def get_usedby_packages(self, list_or_file_path=None, property_validate=True): """ :param list_or_file_path: :param property_validate: for `root` packages we need check property, bad if we find packages from `lock` file, we can skip validate part :return: """ if list_or_file_path is None: list_or_file_path = self._depslock_path if not os.path.isfile(list_or_file_path): list_or_file_path = self._deps_path _packages = OrderedDict() if isinstance(list_or_file_path, str): self._log.info('Reading dependencies ... 
[%s]', list_or_file_path) for i, _src in enumerate(self._config.sources()): if i > 0: self._log.info('') self._log.info('Next source ...') _found_packages = _src.get_usedby(self, list_or_file_path, property_validate) _packages.update( OrderedDict([(k, v) for k, v in _found_packages.items() if _packages.get(k, None) is None])) if not self._config.no_fails: if isinstance(list_or_file_path, (list, tuple)): list_or_file_path = [ x for x in list_or_file_path if _packages.get( x[self._config.name_column], None) is None ] elif isinstance(list_or_file_path, dict) and isinstance( list_or_file_path.get('raw', None), list): list_or_file_path['raw'] = [ x for x in list_or_file_path['raw'] if _packages.get( x[self._config.name_column], None) is None ] return _packages # Download packages or just unpack already loaded (it's up to adapter to decide) def download_packages(self, depslock_file_path=None): if depslock_file_path is None: depslock_file_path = self._depslock_path if depslock_file_path.__class__ is DependenciesContent: # HACK для возможности проставления контента файла, а не пути pass elif isinstance(depslock_file_path, str): if not os.path.isfile(depslock_file_path): depslock_file_path = self._deps_path self.search_dependencies(depslock_file_path) if self.do_load: self._log.info('Unpack ...') total = len(self._root_package.all_packages) for i, _pkg in enumerate(self._root_package.all_packages): update_progress('Download/Unpack:', float(i) / float(total) * 100.0) if _pkg.download(): # self.packed_path): _pkg.unpack() # self.unpacked_path) update_progress('Download/Unpack:', 100) print_stdout('') self._log.info('Done!') sys.stdout.write('\n') sys.stdout.write('\n') sys.stdout.flush() if self._config.lock_on_success: from crosspm.helpers.locker import Locker depslock_path = os.path.realpath( os.path.join(os.path.dirname(depslock_file_path), self._config.deps_lock_file_name)) Locker(self._config, do_load=self.do_load).lock_packages( depslock_file_path, depslock_path, packages=self._root_package.packages) return self._root_package.all_packages def entrypoint(self, *args, **kwargs): self.download_packages(*args, **kwargs) def search_dependencies(self, depslock_file_path): self._log.info('Check dependencies ...') self._root_package.find_dependencies(depslock_file_path, property_validate=True) self._log.info('') self.set_duplicated_flag() self._log.info('Dependency tree:') self._root_package.print( 0, self._config.output('tree', [{ self._config.name_column: 0 }])) self.check_unique(self._config.no_fails) self.check_not_found() def check_not_found(self): _not_found = self.get_not_found_packages() if _not_found: raise CrosspmException( CROSSPM_ERRORCODE_PACKAGE_NOT_FOUND, 'Some package(s) not found: {}'.format(', '.join(_not_found))) def get_not_found_packages(self): return self._root_package.get_none_packages() def add_package(self, pkg_name, package): _added = False if package is not None: _added = True return _added, package def set_duplicated_flag(self): """ For all package set flag duplicated, if it's not unique package :return: """ package_by_name = defaultdict(list) for package1 in self._root_package.all_packages: if package1 is None: continue pkg_name = package1.package_name param_list = self._config.get_fails('unique', {}) params1 = package1.get_params(param_list) for package2 in package_by_name[pkg_name]: params2 = package2.get_params(param_list) for x in param_list: # START HACK for cached archive param1 = params1[x] param2 = params2[x] if isinstance(param1, list): param1 = [str(x) for x in 
param1] if isinstance(param2, list): param2 = [str(x) for x in param2] # END if str(param1) != str(param2): package1.duplicated = True package2.duplicated = True package_by_name[pkg_name].append(package1) def check_unique(self, no_fails): if no_fails: return not_unique = set(x.package_name for x in self._root_package.all_packages if x and x.duplicated) if not_unique: raise CrosspmException( CROSSPM_ERRORCODE_MULTIPLE_DEPS, 'Multiple versions of package "{}" found in dependencies.\nSee dependency tree in log (package with exclamation mark "!")' .format(', '.join(not_unique)), ) def get_raw_packages(self): """ Get all packages :return: list of all packages """ return self._root_package.all_packages def get_tree_packages(self): """ Get all packages, with hierarchy :return: list of first level packages, with child """ return self._root_package.packages
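# --- Illustrative sketch (added; not part of the original sources) -----------
# set_duplicated_flag() above marks a package as duplicated when two resolved
# packages share a name but differ in one of the configured "unique"
# parameters. A standalone reduction of that check over plain dicts; the
# parameter list ('version') is a placeholder for config.get_fails('unique').
from collections import defaultdict

def find_duplicated(packages, unique_params=("version",)):
    seen = defaultdict(list)
    duplicated = set()
    for pkg in packages:
        for earlier in seen[pkg["name"]]:
            if any(str(pkg[p]) != str(earlier[p]) for p in unique_params):
                duplicated.add(pkg["name"])
        seen[pkg["name"]].append(pkg)
    return duplicated

print(find_duplicated([
    {"name": "mylib", "version": "1.2.3"},
    {"name": "mylib", "version": "1.3.0"},
    {"name": "other", "version": "0.1.0"},
]))  # {'mylib'}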
class Downloader(object): # _log = None # _config = None _depslock_path = '' # _package = None _packages = {} do_load = True def __init__(self, config, depslock_path='', do_load=True): self._log = logging.getLogger(__name__) self._config = config self._root_package = Package('<root>', 0, {self._config.name_column: '<root>'}, self, None, config.get_parser('common')) self._cache_path = config.crosspm_cache_root if not os.path.exists(self._cache_path): os.makedirs(self._cache_path) self.packed_path = os.path.realpath(os.path.join(self._cache_path, 'archive')) self.unpacked_path = os.path.realpath(os.path.join(self._cache_path, 'cache')) self.temp_path = os.path.realpath(os.path.join(self._cache_path, 'tmp')) if not depslock_path: depslock_path = config.deps_lock_file_name if config.deps_lock_file_name else CROSSPM_DEPENDENCY_LOCK_FILENAME self._depslock_path = os.path.realpath(depslock_path) self.do_load = do_load # Get list of all packages needed to resolve all the dependencies. # List of Package class instances. def get_packages(self, list_or_file_path=None): if list_or_file_path is None: list_or_file_path = self._depslock_path _packages = {} if type(list_or_file_path) is str: self._log.info('Reading dependencies ... [%s]', list_or_file_path) for i, _src in enumerate(self._config.sources()): if i > 0: print_stdout('') print_stdout('Next source ...') _found_packages = _src.get_packages(self, list_or_file_path) _packages.update({k: v for k, v in _found_packages.items() if (v is not None) or (k not in _packages)}) return _packages # Download packages or just unpack already loaded (it's up to adapter to decide) def download_packages(self, depslock_file_path=None): if depslock_file_path is None: depslock_file_path = self._depslock_path self._log.info('Check dependencies ...') print_stdout('Check dependencies ...') self._packages = {} self._root_package.find_dependencies(depslock_file_path) print_stdout('') print_stdout('Dependency tree:') self._root_package.print(0, self._config.output('tree', [{self._config.name_column: 0}])) _not_found = any(_pkg is None for _pkg in self._packages.values()) if not _not_found and self.do_load: self._log.info('Downloading ...') total = len(self._packages) for i, _pkg in enumerate(self._packages.values()): update_progress('Download/Unpack:', float(i) / float(total) * 100.0) if _pkg.download(self.packed_path): _pkg.unpack(self.unpacked_path) update_progress('Download/Unpack:', 100) self._log.info('Done!') sys.stdout.write('\n') sys.stdout.write('\n') sys.stdout.flush() return self._packages def add_package(self, pkg_name, package): _added = False if self._config.no_fails and package is not None: pkg_name = package.set_full_unique_name() if pkg_name in self._packages: if self._packages[pkg_name] is None: _added = True elif (package is not None) and (not self._config.no_fails): param_list = self._config.get_fails('unique', {}) params1 = self._packages[pkg_name].get_params(param_list) params2 = package.get_params(param_list) for x in param_list: if str(params1[x]) != str(params2[x]): raise CrosspmException( CROSSPM_ERRORCODE_MULTIPLE_DEPS, 'Multiple versions of package "{}" found in dependencies.'.format(pkg_name), ) else: _added = True if _added: self._packages[pkg_name] = package return _added, self._packages[pkg_name]
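# --- Illustrative sketch (added; not part of the original sources) -----------
# download_packages() above drives update_progress('Download/Unpack:', ...)
# across all packages. This standalone reporter reuses the bar format from
# update_progress() in the Downloader(Command) variant further up
# ('\r{msg} [{bar:10}] {percent}%'); it writes to stdout here for a
# self-contained demo, whereas the original goes through self._log.info.
import sys
import time

def update_progress(msg, progress):
    sys.stdout.write('\r{0} [{1:10}] {2}%'.format(msg, '#' * int(float(progress) / 10.0), int(progress)))
    sys.stdout.flush()

total = 5
for i in range(total):
    update_progress('Download/Unpack:', float(i) / float(total) * 100.0)
    time.sleep(0.1)  # stand-in for download + unpack work
update_progress('Download/Unpack:', 100)
print()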
def get_packages(self, source, parser, downloader, packages_matches, property_validate=True):
    # TODO: move requests/http debug logging into a command-line option
    # http.client.HTTPConnection.debuglevel = 1
    # requests_log = logging.getLogger("requests.packages.urllib3")
    # requests_log.setLevel(logging.DEBUG)
    # requests_log.propagate = True
    _art_auth_etc = source.get_auth_params()
    _packages_found = OrderedDict()
    self._log.info('parser: {}'.format(parser._name))

    repo_returned_packages_all = []
    for _path in source.get_paths(packages_matches):
        _tmp_params = Dict(_path.params)
        self._log.info('repo: {}'.format(_tmp_params.repo))

        session.auth = _art_auth_etc['auth']
        aql = ArtifactoryPath(f"{_tmp_params.server}", session=session)
        _path_fixed, _path_pattern, _file_name_pattern = parser.split_fixed_pattern_with_file_name(_path.path)

        _package_versions_with_contracts, packages_with_invalid_naming_convention = \
            self.find_package_versions(_file_name_pattern, _path_pattern, aql, _tmp_params.repo)
        if _package_versions_with_contracts:
            _package_versions_with_all_contracts, package_versions_with_missing_contracts = \
                remove_package_versions_with_missing_contracts(
                    _package_versions_with_contracts, _path.params['contracts']
                )
            for p, missing_contracts in package_versions_with_missing_contracts.items():
                self._log.info(f"Skip {p} - missing contracts {missing_contracts}")
            if _package_versions_with_all_contracts:
                repo_returned_packages_all += _package_versions_with_all_contracts

    package_names = [x.package_name for x in packages_matches]
    bundle = Bundle(package_names, repo_returned_packages_all, downloader._config.trigger_packages,
                    enable_tp_hides_higher_version=False)
    bundle_packages = bundle.calculate().values()

    self._log.info('Final bundle packages with contracts:')
    print_packages_by_contracts_scheme(self._log, bundle_packages)

    for p in bundle_packages:
        _packages_found[p.name] = Package(p.name, p.art_path, p, {}, downloader, self, parser, {}, {})

    # Mark requested packages that no source resolved (p is already the name string here)
    for p in package_names:
        if p not in _packages_found.keys():
            _packages_found[p] = None
    return _packages_found
def get_packages(self, source, parser, downloader, list_or_file_path): _auth_type = source.args['auth_type'].lower() if 'auth_type' in source.args else 'simple' _art_auth = {} if 'auth' in source.args: if _auth_type == 'simple': _art_auth['auth'] = tuple(source.args['auth']) elif _auth_type == 'cert': _art_auth['cert'] = os.path.realpath(source.args['auth']) if 'verify' in source.args: _art_auth['verify'] = source.args['verify'].lower in ['true', 'yes', '1'] _pkg_name_col = self._config.name_column _packages_found = {} _pkg_name_old = "" for _paths in parser.get_paths(list_or_file_path, source): _packages = [] _pkg_name = _paths['params'][_pkg_name_col] if _pkg_name != _pkg_name_old: _pkg_name_old = _pkg_name print_stdout( '{}: {}'.format(_pkg_name, {k: v for k, v in _paths['params'].items() if k != _pkg_name_col})) for _path in _paths['paths']: _path_fixed, _path_pattern = parser.split_fixed_pattern(_path) _repo_paths = ArtifactoryPath(_path_fixed, **_art_auth) for _repo_path in _repo_paths.glob(_path_pattern): _mark = 'found' if parser.validate_path(str(_repo_path), _paths['params']): _mark = 'match' if parser.validate(_repo_path.properties, 'properties', _paths['params']): _mark = 'valid' _packages += [_repo_path] print_stdout(' {}: {}'.format(_mark, str(_repo_path))) _package = None if _packages: _packages = parser.filter_one(_packages, _paths['params']) if type(_packages) is dict: _packages = [_packages] if len(_packages) == 1: # one package found: ok! # _stat = _packages[0]['path'].stat() # _stat = {k: getattr(_stat, k, None) for k in ('ctime', # 'mtime', # 'md5', # 'sha1', # 'size')} _package = Package(_pkg_name, _packages[0]['path'], _paths['params'], downloader, self, parser, _packages[0]['params']) # , _stat) _mark = 'chosen' print_stdout(' {}: {}'.format(_mark, str(_packages[0]['path']))) elif len(_packages) > 1: # TODO: multiple packages found: wtf?! raise CrosspmException( CROSSPM_ERRORCODE_MULTIPLE_DEPS, 'Multiple instances found for package [{}] not found.'.format(_pkg_name) ) else: # Package not found: may be error, but it could be in other source. pass else: # Package not found: may be error, but it could be in other source. pass # _pkg_name = self._config.get_column_name(0) # raise CrosspmException( # CROSSPM_ERRORCODE_PACKAGE_NOT_FOUND, # 'Package [{}] not found.'.format(_pkg_name) # ) if (_package is not None) or (not self._config.no_fails): _added, _package = downloader.add_package(_pkg_name, _package) else: _added = False if _package is not None: _pkg_name = _package.get_name_and_path(True) if _added or (_package is not None): if (_package is not None) or (not self._config.no_fails): if (_package is not None) or (_packages_found.get(_pkg_name, None) is None): _packages_found[_pkg_name] = _package if _added and (_package is not None): if downloader.do_load: _package.download(downloader.packed_path) _deps_file = _package.get_file(self._config.deps_lock_file_name, downloader.temp_path) if _deps_file: _package.find_dependencies(_deps_file) return _packages_found
def get_packages(self, source, parser, downloader, list_or_file_path): _auth_type = source.args['auth_type'].lower( ) if 'auth_type' in source.args else 'simple' _art_auth = {} if 'auth' in source.args: if _auth_type == 'simple': _art_auth['auth'] = tuple(source.args['auth']) elif _auth_type == 'cert': _art_auth['cert'] = os.path.realpath(source.args['auth']) if 'verify' in source.args: _art_auth['verify'] = source.args['verify'].lower in [ 'true', 'yes', '1' ] _pkg_name_col = self._config.name_column _packages_found = {} _pkg_name_old = "" for _paths in parser.get_paths(list_or_file_path, source): _packages = [] _pkg_name = _paths['params'][_pkg_name_col] if _pkg_name != _pkg_name_old: _pkg_name_old = _pkg_name print_stdout('{}: {}'.format( _pkg_name, { k: v for k, v in _paths['params'].items() if k != _pkg_name_col })) for _path in _paths['paths']: _path_fixed, _path_pattern = parser.split_fixed_pattern(_path) _repo_paths = ArtifactoryPath(_path_fixed, **_art_auth) for _repo_path in _repo_paths.glob(_path_pattern): _mark = 'found' if parser.validate_path(str(_repo_path), _paths['params']): _mark = 'match' if parser.validate(_repo_path.properties, 'properties', _paths['params']): _mark = 'valid' _packages += [_repo_path] print_stdout(' {}: {}'.format(_mark, str(_repo_path))) _package = None if _packages: _packages = parser.filter_one(_packages, _paths['params']) if type(_packages) is dict: _packages = [_packages] if len(_packages) == 1: # one package found: ok! # _stat = _packages[0]['path'].stat() # _stat = {k: getattr(_stat, k, None) for k in ('ctime', # 'mtime', # 'md5', # 'sha1', # 'size')} _package = Package(_pkg_name, _packages[0]['path'], _paths['params'], downloader, self, parser, _packages[0]['params']) # , _stat) _mark = 'chosen' print_stdout(' {}: {}'.format(_mark, str(_packages[0]['path']))) elif len(_packages) > 1: # TODO: multiple packages found: wtf?! raise CrosspmException( CROSSPM_ERRORCODE_MULTIPLE_DEPS, 'Multiple instances found for package [{}] not found.'. format(_pkg_name)) else: # Package not found: may be error, but it could be in other source. pass else: # Package not found: may be error, but it could be in other source. pass # _pkg_name = self._config.get_column_name(0) # raise CrosspmException( # CROSSPM_ERRORCODE_PACKAGE_NOT_FOUND, # 'Package [{}] not found.'.format(_pkg_name) # ) if (_package is not None) or (not self._config.no_fails): _added, _package = downloader.add_package(_pkg_name, _package) else: _added = False if _package is not None: _pkg_name = _package.get_name_and_path(True) if _added or (_package is not None): if (_package is not None) or (not self._config.no_fails): if (_package is not None) or (_packages_found.get( _pkg_name, None) is None): _packages_found[_pkg_name] = _package if _added and (_package is not None): if downloader.do_load: _package.download(downloader.packed_path) _deps_file = _package.get_file( self._config.deps_lock_file_name, downloader.temp_path) if _deps_file: _package.find_dependencies(_deps_file) return _packages_found
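# --- Illustrative sketch (added; not part of the original sources) -----------
# Both get_packages() variants above translate source.args into keyword
# arguments for ArtifactoryPath: 'simple' auth becomes a (user, password)
# tuple, 'cert' becomes a client-certificate path, and 'verify' is derived
# from a truthy string. A standalone reduction of that translation; note that
# the string check here calls .lower() as a function before comparing.
import os

def build_art_auth(args):
    auth_kwargs = {}
    auth_type = args.get('auth_type', 'simple').lower()
    if 'auth' in args:
        if auth_type == 'simple':
            auth_kwargs['auth'] = tuple(args['auth'])
        elif auth_type == 'cert':
            auth_kwargs['cert'] = os.path.realpath(args['auth'])
    if 'verify' in args:
        auth_kwargs['verify'] = args['verify'].lower() in ('true', 'yes', '1')
    return auth_kwargs

print(build_art_auth({'auth': ['user', 'password'], 'verify': 'True'}))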