def test_generate_unit_key(self):
    """The command must build the same unit key Module builds for jdob-valid-1.0.0."""
    expected = Module.generate_unit_key('valid', '1.0.0', 'jdob')
    actual = self.command.generate_unit_key(self.filename)
    self.assertEqual(actual, expected)
def test_generate_unit_key(self):
    """The command must build the same unit key Module builds for jdob-valid-1.0.0."""
    expected = Module.generate_unit_key('valid', '1.0.0', 'jdob')
    actual = self.command.generate_unit_key(self.filename)
    self.assertEqual(actual, expected)
def test_generate_unit_key_complex_version(self):
    """A hyphenated pre-release version must survive the filename parse intact."""
    tarball = os.path.join(MODULES_DIR, 'jdob-valid-1.0.0-rc1.tar.gz')
    expected = Module.generate_unit_key('valid', '1.0.0-rc1', 'jdob')
    self.assertEqual(self.command.generate_unit_key(tarball), expected)
def test_generate_unit_key_complex_version(self):
    """A hyphenated pre-release version must survive the filename parse intact."""
    tarball = os.path.join(MODULES_DIR, 'jdob-valid-1.0.0-rc1.tar.gz')
    expected = Module.generate_unit_key('valid', '1.0.0-rc1', 'jdob')
    self.assertEqual(self.command.generate_unit_key(tarball), expected)
if self._canceled: break module = modules_by_key[key] try: self._add_new_module(downloader, module) self.progress_report.modules_finished_count += 1 except Exception, e: self.progress_report.add_failed_module(module, e, sys.exc_info()[2]) self.progress_report.update_progress() # Remove missing units if the configuration indicates to do so if self._should_remove_missing(): existing_units_by_key = {} for u in existing_units: unit_key = Module.generate_unit_key(u.unit_key['name'], u.unit_key['version'], u.unit_key['author']) s = unit_key_str(unit_key) existing_units_by_key[s] = u for key in remove_unit_keys: doomed = existing_units_by_key[key] self.sync_conduit.remove_unit(doomed) self.downloader = None def _add_new_module(self, downloader, module): """ Performs the tasks for downloading and saving a new unit in Pulp. :param downloader: downloader instance to use for retrieving the unit :param module: module instance to download
def generate_unit_key(self, filename, **kwargs):
    """
    Derive a module unit key from a tarball filename.

    The basename is expected to look like ``author-name-version.tar.gz``;
    only the first two hyphens split fields, so hyphenated versions
    (e.g. ``1.0.0-rc1``) are kept whole.

    :param filename: path to the module tarball
    :return: unit key dict produced by Module.generate_unit_key
    """
    stem = os.path.basename(filename)
    stem = stem[:-len('.tar.gz')]  # strip the fixed extension
    author, name, version = stem.split('-', 2)
    return Module.generate_unit_key(name, version, author)
module = modules_by_key[key] try: self._add_new_module(downloader, module) self.progress_report.modules_finished_count += 1 except Exception, e: self.progress_report.add_failed_module(module, e, sys.exc_info()[2]) self.progress_report.update_progress() # Remove missing units if the configuration indicates to do so if self._should_remove_missing(): existing_units_by_key = {} for u in existing_units: unit_key = Module.generate_unit_key(u.unit_key['name'], u.unit_key['version'], u.unit_key['author']) s = unit_key_str(unit_key) existing_units_by_key[s] = u for key in remove_unit_keys: doomed = existing_units_by_key[key] self.sync_conduit.remove_unit(doomed) self.downloader = None def _add_new_module(self, downloader, module): """ Performs the tasks for downloading and saving a new unit in Pulp. :param downloader: downloader instance to use for retrieving the unit
def generate_unit_key(self, filename, **kwargs):
    """
    Derive a module unit key from a tarball filename.

    The basename is expected to look like ``author-name-version.tar.gz``;
    only the first two hyphens split fields, so hyphenated versions
    (e.g. ``1.0.0-rc1``) are kept whole.

    :param filename: path to the module tarball
    :return: unit key dict produced by Module.generate_unit_key
    """
    stem = os.path.basename(filename)
    stem = stem[:-len('.tar.gz')]  # strip the fixed extension
    author, name, version = stem.split('-', 2)
    return Module.generate_unit_key(name, version, author)
def _do_import_modules(self, metadata):
    """
    Actual logic of the import. This method will do a best effort per module;
    if an individual module fails it will be recorded and the import will
    continue. This method will only raise an exception in an extreme case
    where it cannot react and continue.

    :param metadata: repository metadata; its .modules attribute lists the
        modules advertised by the remote repository
    """

    def unit_key_str(unit_key_dict):
        """
        Converts the unit key dict form into a single string that can be
        used as the key in a dict lookup.
        """
        template = '%s-%s-%s'
        return template % (encode_unicode(unit_key_dict['name']),
                           encode_unicode(unit_key_dict['version']),
                           encode_unicode(unit_key_dict['author']))

    downloader = self._create_downloader()

    # Ease lookup of modules by their unit key string
    modules_by_key = dict((unit_key_str(m.unit_key()), m) for m in metadata.modules)

    # Collect information about the repository's modules before changing it
    module_criteria = UnitAssociationCriteria(type_ids=[constants.TYPE_PUPPET_MODULE])
    existing_units = self.sync_conduit.get_units(criteria=module_criteria)
    existing_modules = [Module.from_unit(x) for x in existing_units]
    existing_module_keys = [unit_key_str(m.unit_key()) for m in existing_modules]

    new_unit_keys = self._resolve_new_units(existing_module_keys, modules_by_key.keys())
    remove_unit_keys = self._resolve_remove_units(existing_module_keys, modules_by_key.keys())

    # Once we know how many things need to be processed, we can update the
    # progress report
    self.progress_report.modules_total_count = len(new_unit_keys)
    self.progress_report.modules_finished_count = 0
    self.progress_report.modules_error_count = 0
    self.progress_report.update_progress()

    # Add new units; a failure on one module is recorded and the loop continues
    for key in new_unit_keys:
        module = modules_by_key[key]
        try:
            self._add_new_module(downloader, module)
            self.progress_report.modules_finished_count += 1
        except Exception as e:
            # Pass the exception object as well as the traceback so the
            # report records why the module failed; this matches the
            # add_failed_module(module, e, tb) call used elsewhere in
            # this file.
            self.progress_report.add_failed_module(module, e, sys.exc_info()[2])
        self.progress_report.update_progress()

    # Remove missing units if the configuration indicates to do so
    if self._should_remove_missing():
        existing_units_by_key = {}
        for u in existing_units:
            unit_key = Module.generate_unit_key(u.unit_key['name'],
                                                u.unit_key['version'],
                                                u.unit_key['author'])
            s = unit_key_str(unit_key)
            existing_units_by_key[s] = u

        for key in remove_unit_keys:
            doomed = existing_units_by_key[key]
            self.sync_conduit.remove_unit(doomed)