def resolve_dependencies(self, repo, units, dependency_conduit, config):
    """
    Resolve RPM dependencies for the given units against the repository.

    @param repo: plugin repository model the dependencies are resolved in
    @param units: content units whose dependencies should be resolved;
        only units with type_id 'rpm' are considered
    @param dependency_conduit: conduit used to look up existing rpm units
    @param config: plugin call configuration; the 'recursive' flag selects
        recursive vs. single-level dependency resolution
    @return: dict with keys 'resolved' (dep -> list of unit keys already in
        the repo), 'unresolved' (deps with no match) and
        'printable_dependency_result' (human-readable summary)
    @rtype: dict
    """
    _LOG.info("Resolve Dependencies Invoked")
    result_dict = {}
    # Build "name-version-release.arch" strings for the rpm units only;
    # other unit types carry no rpm dependency information.
    pkglist = ["%s-%s-%s.%s" % (unit.unit_key['name'],
                                unit.unit_key['version'],
                                unit.unit_key['release'],
                                unit.unit_key['arch'])
               for unit in units if unit.type_id == 'rpm']
    dsolve = depsolver.DepSolver([repo], pkgs=pkglist)
    if config.get('recursive'):
        results = dsolve.getRecursiveDepList()
    else:
        results = dsolve.getDependencylist()
    solved, unsolved = dsolve.processResults(results)
    dep_pkgs_map = {}
    # Lazy %-args: the message is only formatted if this log level is enabled.
    _LOG.info(" results from depsolver %s", results)
    criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM])
    existing_units = get_existing_units(dependency_conduit, criteria)
    for dep, pkgs in solved.items():
        dep_pkgs_map[dep] = []
        for pkg in pkgs:
            # `has_key` is Python-2-only; `in` is equivalent and portable.
            # Skip suggested packages that are not already in the inventory.
            if pkg not in existing_units:
                continue
            epkg = existing_units[pkg]
            dep_pkgs_map[dep].append(epkg.unit_key)
    _LOG.debug("deps packages suggested %s", solved)
    result_dict['resolved'] = dep_pkgs_map
    result_dict['unresolved'] = unsolved
    result_dict['printable_dependency_result'] = dsolve.printable_result(results)
    # Release the depsolver's yum/repo resources before returning.
    dsolve.cleanup()
    _LOG.info("result dict %s", result_dict)
    return result_dict
def find_missing_dependencies(self, repo, units, conduit, config):
    """
    Find dependencies within the specified repository that
    are not included in the specified I{units} list.  This method
    is intended to be used by import_units() to ensure that all
    dependencies of imported units are satisfied.
    @param repo: A plugin repo model object.
    @type repo: L{pulp.plugins.model.Repository}
    @param units: A list of content units.
        Unit is: L{pulp.plugins.model.Unit}
    @type units: list
    @param conduit: An import conduit.
    @type conduit: L{pulp.plugins.conduits.unit_import.ImportConduit}
    @param config: plugin configuration
    @type config: L{pulp.server.plugins.config.PluginCallConfiguration}
    @return: The list of missing dependencies (units).
        Unit is: L{pulp.plugins.model.Unit}
    @rtype: list
    """
    missing_deps = []
    deps = self.resolve_dependencies(repo, units, conduit, config)
    # BUG FIX: an itertools.chain object is ALWAYS truthy, so the original
    # `if resolved:` guard never detected the empty case. Materialize the
    # chained unit keys into a list so the emptiness test is meaningful.
    resolved = list(itertools.chain(*deps['resolved'].values()))
    if resolved:
        keylist = self.keylist(resolved)
        criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM])
        inventory = get_existing_units(conduit, criteria)
        for key in keylist:
            unit = inventory.get(key)
            # Dependencies not present in the inventory cannot be returned
            # as units; skip them.
            if unit is None:
                continue
            missing_deps.append(unit)
    return missing_deps
def resolve_dependencies(self, repo, units, dependency_conduit, config):
    """
    Resolve RPM dependencies for the given units against the repository.

    Regenerates the source repo's yum metadata first, since the depsolver
    relies on it; raises if that fails rather than producing bad results.

    @param repo: plugin repository model the dependencies are resolved in
    @param units: content units to compute the package list from
    @param dependency_conduit: conduit used to look up existing rpm units
    @param config: plugin call configuration; the 'recursive' flag selects
        recursive vs. single-level dependency resolution
    @return: dict with keys 'resolved' (dep -> list of unit keys already in
        the repo), 'unresolved' (deps with no match) and
        'printable_dependency_result' (human-readable summary)
    @rtype: dict
    @raise metadata.GenerateYumMetadataException: if metadata generation
        for the source repo fails
    """
    result_dict = {}
    pkglist = self.pkglist(units)
    # generate metadata for the source repo
    metadata_status, metadata_errors = self._generate_metadata(repo, dependency_conduit, config)
    if not metadata_status:
        # unable to generate metadata, lets fail right here as dependency
        # resolver wont handle this gracefully.
        msg = "Failed to generate metadata for repo %s; Error %s" % (repo.id, metadata_errors)
        _LOG.error(msg)
        raise metadata.GenerateYumMetadataException(msg)
    dsolve = depsolver.DepSolver([repo], pkgs=pkglist)
    if config.get('recursive'):
        results = dsolve.getRecursiveDepList()
    else:
        results = dsolve.getDependencylist()
    solved, unsolved = dsolve.processResults(results)
    dep_pkgs_map = {}
    # Lazy %-args: formatted only when debug logging is enabled.
    _LOG.debug(" results from depsolver %s", results)
    criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM])
    existing_units = get_existing_units(dependency_conduit, criteria)
    for dep, pkgs in solved.items():
        dep_pkgs_map[dep] = []
        for pkg in pkgs:
            # `has_key` is Python-2-only; `in` is equivalent and portable.
            # Skip suggested packages not already in the inventory.
            if pkg not in existing_units:
                continue
            epkg = existing_units[pkg]
            dep_pkgs_map[dep].append(epkg.unit_key)
    _LOG.debug("deps packages suggested %s", solved)
    result_dict['resolved'] = dep_pkgs_map
    result_dict['unresolved'] = unsolved
    result_dict['printable_dependency_result'] = dsolve.printable_result(results)
    # Release the depsolver's yum/repo resources before returning.
    dsolve.cleanup()
    return result_dict
def test_get_existing_units(self):
    """
    get_existing_units() should return a dict keyed by form_lookup_key()
    containing each unit the sync conduit reports as already associated.
    """
    # Dummy unit key populated with the same placeholder for every field.
    unit_key = dict.fromkeys(UNIT_KEY_RPM, "test_value")
    existing_units = [Unit(TYPE_ID_RPM, unit_key, "test_metadata",
                           os.path.join(self.pkg_dir, "test_rel_path"))]
    sync_conduit = importer_mocks.get_sync_conduit(existing_units=existing_units,
                                                   pkg_dir=self.pkg_dir)
    actual_existing_units = importer_rpm.get_existing_units(sync_conduit)
    # `assertEquals` is a deprecated alias; use `assertEqual`.
    self.assertEqual(len(actual_existing_units), 1)
    self.assertEqual(len(existing_units), len(actual_existing_units))
    # The returned dict must be keyed by the canonical lookup key.
    lookup_key = importer_rpm.form_lookup_key(unit_key)
    self.assertEqual(existing_units[0], actual_existing_units[lookup_key])
def import_units(self, source_repo, dest_repo, import_conduit, config, units=None):
    """
    Associate units from a source repository into a destination repository.

    @param source_repo: metadata describing the repository containing the
        units to import
    @type source_repo: L{pulp.plugins.data.Repository}
    @param dest_repo: metadata describing the repository to import units into
    @type dest_repo: L{pulp.plugins.data.Repository}
    @param import_conduit: provides access to relevant Pulp functionality
    @type import_conduit: L{pulp.plugins.conduits.unit_import.ImportUnitConduit}
    @param config: plugin configuration
    @type config: L{pulp.plugins.plugins.config.PluginCallConfiguration}
    @param units: optional list of pre-filtered units to import
    @type units: list of L{pulp.plugins.data.Unit}
    """
    # Default to every unit in the source repo when no pre-filtered list
    # was supplied by the caller.
    if not units:
        units = import_conduit.get_source_units()
    blacklist_units = self._query_blacklist_units(import_conduit, config)
    _LOG.info("Importing %s units from %s to %s" % (len(units), source_repo.id, dest_repo.id))
    # Snapshot of (s)rpm units already associated, used while resolving
    # dependencies of imported rpm and errata units.
    rpm_criteria = UnitAssociationCriteria(type_ids=[TYPE_ID_RPM, TYPE_ID_SRPM])
    existing_rpm_units_dict = get_existing_units(import_conduit, criteria=rpm_criteria)
    for unit in units:
        # Blacklisted units are skipped entirely.
        if unit.unit_key in blacklist_units:
            continue
        type_id = unit.type_id
        if type_id == TYPE_ID_RPM:
            import_conduit.associate_unit(unit)
            # rpm unit: resolve its dependencies and import those as well
            self._import_unit_dependencies(source_repo, [unit], import_conduit, config,
                                           existing_rpm_units=existing_rpm_units_dict,
                                           blacklist_units=blacklist_units)
        elif type_id == TYPE_ID_ERRATA:
            import_conduit.associate_unit(unit)
            # erratum: look up referenced rpms and associate them too
            self._import_errata_unit_rpms(source_repo, unit, import_conduit, config,
                                          existing_rpm_units_dict,
                                          blacklist_units=blacklist_units)
        elif type_id == TYPE_ID_PKG_GROUP:
            # Copy before mutating so the source unit's repo_id is untouched.
            unit = self._safe_copy_unit(unit)
            unit.unit_key['repo_id'] = dest_repo.id
            import_conduit.save_unit(unit)
            # pkg group saved; pull in its child units as well
            self._import_pkg_group_unit(source_repo, unit, import_conduit, config)
        elif type_id == TYPE_ID_PKG_CATEGORY:
            # Copy before mutating so the source unit's repo_id is untouched.
            unit = self._safe_copy_unit(unit)
            unit.unit_key['repo_id'] = dest_repo.id
            import_conduit.save_unit(unit)
            # pkg category saved; pull in its pkg groups as well
            self._import_pkg_category_unit(source_repo, unit, import_conduit, config)
        elif type_id == TYPE_ID_DISTRO:
            import_conduit.associate_unit(unit)
    _LOG.debug("%s units from %s have been associated to %s" % (len(units), source_repo.id, dest_repo.id))
def link_errata_rpm_units(sync_conduit, new_errata_units):
    """
    Links errata to corresponding rpms

    @param sync_conduit
    @type sync_conduit pulp.server.content.conduits.repo_sync.RepoSyncConduit

    @param new_errata_units: errata units to link
    @type new_errata_units: {}

    @return a link_report dictionary with keys 'linked_units' (rpm units
        successfully linked to an erratum) and 'missing_rpms' (package
        info dicts with no matching rpm unit in the repo).
    @rtype {}
    """
    link_report = {'linked_units': [], 'missing_rpms': []}
    # link errata and rpm units
    criteria = UnitAssociationCriteria(type_ids=[importer_rpm.RPM_TYPE_ID,
                                                 importer_rpm.SRPM_TYPE_ID])
    existing_rpms = importer_rpm.get_existing_units(sync_conduit, criteria=criteria)
    for u in new_errata_units.values():
        for pkg in u.metadata['pkglist']:
            for pinfo in pkg['packages']:
                # `has_key` is Python-2-only; `in` is equivalent and portable.
                # Without checksum info we cannot form a lookup key.
                if 'sum' not in pinfo:
                    _LOG.debug("Missing checksum info on package <%s> for linking a rpm to an erratum.", pinfo)
                    continue
                # 'sum' is a (checksumtype, checksum) pair.
                pinfo['checksumtype'], pinfo['checksum'] = pinfo['sum']
                rpm_key = importer_rpm.form_lookup_key(pinfo)
                # Direct dict membership test instead of scanning .keys().
                if rpm_key in existing_rpms:
                    rpm_unit = existing_rpms[rpm_key]
                    _LOG.info("Found matching rpm unit %s", rpm_unit)
                    sync_conduit.link_unit(u, rpm_unit, bidirectional=True)
                    link_report['linked_units'].append(rpm_unit)
                else:
                    link_report['missing_rpms'].append(pinfo)
                    _LOG.info("rpm unit %s not found; skipping", pinfo)
    return link_report