def test_unit_applicable(self):
    """
    calculate_applicable_units() should report both the RPM and the erratum as
    applicable when the erratum's RPMs are in the consumer's profile AND in the
    bound repository.
    """
    # Errata refers to RPMs which ARE part of our test consumer's profile,
    # AND in the repo.
    errata_obj = self.get_test_errata_object()
    errata_unit = Unit(TYPE_ID_ERRATA, {"id": errata_obj["id"]}, errata_obj, None)
    # Give the erratum an id so it can be matched in the report below
    errata_unit.id = 'an_errata'
    rpm_unit_key = self.create_profile_entry("emoticons", 0, "0.1", "2", "x86_64",
                                             "Test Vendor")
    rpm_unit = Unit(TYPE_ID_RPM, rpm_unit_key, {}, None)
    # Let's give it an id, so we can assert for it later
    rpm_unit.id = 'a_test_id'
    test_repo = profiler_mocks.get_repo("test_repo_id")
    prof = YumProfiler()
    errata_rpms = prof._get_rpms_from_errata(errata_unit)
    # Conduit serves both units from the bound repo, with the erratum's RPM list
    # available for the applicability calculation.
    conduit = profiler_mocks.get_profiler_conduit(repo_units=[errata_unit, rpm_unit],
                                                  repo_bindings=[test_repo],
                                                  errata_rpms=errata_rpms)
    unit_profile = self.test_consumer.profiles[TYPE_ID_RPM]
    bound_repo_id = "test_repo_id"
    report_list = prof.calculate_applicable_units(unit_profile, bound_repo_id, None, conduit)
    # Both content types should be reported applicable, keyed by the ids set above.
    self.assertEqual(report_list, {TYPE_ID_RPM: ['a_test_id'],
                                   TYPE_ID_ERRATA: ['an_errata']})
def test_handle_erratum_with_link(self, mock_link):
    """
    _handle_erratum() should init and save the unit through the conduit, then
    link the *saved* unit (which has an id) to its RPMs.
    """
    # Setup
    unit_key = {'id': 'test-erratum'}
    metadata = {'a': 'a'}
    config = PluginCallConfiguration({}, {})
    mock_repo = mock.MagicMock()
    mock_conduit = mock.MagicMock()
    inited_unit = Unit(models.Errata.TYPE, unit_key, metadata, None)
    saved_unit = Unit(models.Errata.TYPE, unit_key, metadata, None)
    saved_unit.id = 'ihaveanidnow'
    mock_conduit.init_unit.return_value = inited_unit
    mock_conduit.save_unit.return_value = saved_unit

    # Test
    upload._handle_erratum(mock_repo, models.Errata.TYPE, unit_key, metadata, None,
                           mock_conduit, config)

    # Verify
    mock_conduit.init_unit.assert_called_once_with(models.Errata.TYPE, unit_key,
                                                   metadata, None)
    mock_conduit.save_unit.assert_called_once_with(inited_unit)
    # NOTE: mock_link.assert_called_once() is NOT a real assertion on older mock
    # releases -- it is an auto-created Mock attribute that silently passes.
    # Assert on call_count instead so the check actually verifies something.
    self.assertEqual(1, mock_link.call_count)
    self.assertEqual(mock_link.call_args[0][0], mock_conduit)
    self.assertTrue(isinstance(mock_link.call_args[0][1], models.Errata))
    # it is very important that this is the saved_unit, and not the inited_unit,
    # because the underlying link logic requires it to have an "id".
    self.assertTrue(mock_link.call_args[0][2] is saved_unit)
def test_import_units__units_empty_list(self):
    """
    Make sure that when an empty list is passed, we import zero units.
    """
    source_units = [Unit(ids.TYPE_ID_ISO, {'name': 'test.iso'}, {}, '/path/test.iso'),
                    Unit(ids.TYPE_ID_ISO, {'name': 'test2.iso'}, {}, '/path/test2.iso'),
                    Unit(ids.TYPE_ID_ISO, {'name': 'test3.iso'}, {}, '/path/test3.iso')]
    import_conduit = importer_mocks.get_import_conduit(source_units=source_units)
    # source_repo, dest_repo, and config aren't used by import_units, so we'll just set them to
    # None for simplicity. Let's pass an empty list as the units we want to import
    units_to_import = []
    imported_units = self.iso_importer.import_units(None, None, import_conduit, None,
                                                    units=units_to_import)
    # There should have been zero calls to the import_conduit. None to get_source_units(), and
    # none to associate units. An empty list is distinct from None (which means "all units").
    self.assertEqual(len(import_conduit.get_source_units.call_args_list), 0)
    self.assertEqual(len(import_conduit.associate_unit.call_args_list), 0)
    # Make sure that the returned units are correct (the same empty list back)
    self.assertEqual(imported_units, units_to_import)
def test_import_units__units_none(self):
    """
    Make sure that when units=None, we import all units from the import_conduit.
    """
    source_units = [Unit(ids.TYPE_ID_ISO, {'name': 'test.iso'}, {}, '/path/test.iso'),
                    Unit(ids.TYPE_ID_ISO, {'name': 'test2.iso'}, {}, '/path/test2.iso'),
                    Unit(ids.TYPE_ID_ISO, {'name': 'test3.iso'}, {}, '/path/test3.iso')]
    import_conduit = importer_mocks.get_import_conduit(source_units=source_units)
    # source_repo, dest_repo, and config aren't used by import_units, so we'll just set them to
    # None for simplicity.
    imported_units = self.iso_importer.import_units(None, None, import_conduit, None,
                                                    units=None)
    # There should have been four calls to the import_conduit. One to get_source_units(), and
    # three to associate units.
    # get_source_units should have a UnitAssociationCriteria that specified ISOs, so we'll
    # assert that behavior.
    self.assertEqual(len(import_conduit.get_source_units.call_args_list), 1)
    # call_args_list entries are (args, kwargs) pairs; index [1] is the kwargs dict.
    get_source_units_args = tuple(import_conduit.get_source_units.call_args_list[0])[1]
    self.assertEqual(get_source_units_args['criteria']['type_ids'], [ids.TYPE_ID_ISO])
    # There are three Units, so there should be three calls to associate_unit since we didn't
    # pass which units we wanted to import. Let's make sure the three calls were made with the
    # correct Units.
    self.assertEqual(len(import_conduit.associate_unit.call_args_list), 3)
    expected_unit_names = ['test.iso', 'test2.iso', 'test3.iso']
    actual_unit_names = [tuple(call)[0][0].unit_key['name']
                         for call in import_conduit.associate_unit.call_args_list]
    self.assertEqual(actual_unit_names, expected_unit_names)
    # The three Units should have been returned
    self.assertEqual(imported_units, source_units)
def test_import_units__units_some(self):
    """
    Make sure that when units are passed, we import only those units.
    """
    source_units = [Unit(ids.TYPE_ID_ISO, {'name': 'test.iso'}, {}, '/path/test.iso'),
                    Unit(ids.TYPE_ID_ISO, {'name': 'test2.iso'}, {}, '/path/test2.iso'),
                    Unit(ids.TYPE_ID_ISO, {'name': 'test3.iso'}, {}, '/path/test3.iso')]
    import_conduit = importer_mocks.get_import_conduit(source_units=source_units)
    # source_repo, dest_repo, and config aren't used by import_units, so we'll just set them to
    # None for simplicity. Let's use test.iso and test3.iso, leaving out test2.iso.
    # range(0, 3, 2) selects indices 0 and 2.
    units_to_import = [source_units[i] for i in range(0, 3, 2)]
    imported_units = self.iso_importer.import_units(None, None, import_conduit, None,
                                                    units=units_to_import)
    # There should have been two calls to the import_conduit. None to get_source_units(), and
    # two to associate units (the explicit unit list means no lookup is needed).
    self.assertEqual(len(import_conduit.get_source_units.call_args_list), 0)
    # There are two Units, so there should be two calls to associate_unit since we passed which
    # units we wanted to import. Let's make sure the two calls were made with the
    # correct Units.
    self.assertEqual(len(import_conduit.associate_unit.call_args_list), 2)
    expected_unit_names = ['test.iso', 'test3.iso']
    actual_unit_names = [tuple(call)[0][0].unit_key['name']
                         for call in import_conduit.associate_unit.call_args_list]
    self.assertEqual(actual_unit_names, expected_unit_names)
    # Make sure that the returned units are correct
    self.assertEqual(imported_units, units_to_import)
def test_some_existing(self):
    """
    get_rpms_to_copy_by_name() should return only the names from self.RPM_NAMES
    that are NOT already present in the destination, and should query the
    destination with a criteria limited to RPM unit-key fields.
    """
    # Build two "already in the destination" RPMs named postfix and vim-common.
    postfix = model_factory.rpm_models(1)[0]
    postfix.name = 'postfix'
    vim = model_factory.rpm_models(1)[0]
    vim.name = 'vim-common'
    existing = [
        Unit(postfix.TYPE, postfix.unit_key, postfix.metadata, ''),
        Unit(vim.TYPE, vim.unit_key, vim.metadata, ''),
    ]
    conduit = ImportUnitConduit('', '', '', '')
    conduit.get_destination_units = mock.MagicMock(
        spec_set=conduit.get_destination_units, return_value=existing)

    ret = associate.get_rpms_to_copy_by_name(self.RPM_NAMES, conduit)

    # NOTE(review): assumes self.RPM_NAMES contains postfix, vim-common and
    # python-mock — confirm against the class fixture.
    self.assertEqual(set(ret), set(['python-mock']))
    self.assertEqual(conduit.get_destination_units.call_count, 1)
    self.assertTrue(isinstance(conduit.get_destination_units.call_args[0][0],
                               UnitAssociationCriteria))
    self.assertEqual(conduit.get_destination_units.call_args[0][0].type_ids,
                     [models.RPM.TYPE])
    self.assertEqual(conduit.get_destination_units.call_args[0][0].unit_fields,
                     models.RPM.UNIT_KEY_NAMES)
def setUp(self):
    """
    Build a fully-populated importer config, a temporary package directory,
    three pre-existing ISO units, a mocked sync conduit, and the ISOSyncRun
    under test.
    """
    config = {
        importer_constants.KEY_FEED: 'http://fake.com/iso_feed/',
        importer_constants.KEY_MAX_SPEED: 500.0,
        importer_constants.KEY_MAX_DOWNLOADS: 5,
        importer_constants.KEY_SSL_VALIDATION: False,
        importer_constants.KEY_SSL_CLIENT_CERT: "Trust me, I'm who I say I am.",
        importer_constants.KEY_SSL_CLIENT_KEY: "Secret Key",
        importer_constants.KEY_SSL_CA_CERT: "Uh, I guess that's the right server.",
        importer_constants.KEY_PROXY_HOST: 'proxy.com',
        importer_constants.KEY_PROXY_PORT: 1234,
        importer_constants.KEY_PROXY_USER: "******",
        importer_constants.KEY_PROXY_PASS: '******',
        importer_constants.KEY_VALIDATE: False,
    }
    self.config = importer_mocks.get_basic_config(**config)

    self.temp_dir = tempfile.mkdtemp()
    self.pkg_dir = os.path.join(self.temp_dir, 'content')
    os.mkdir(self.pkg_dir)

    # These checksums correspond to the checksums of the files that our curl mocks will
    # generate. Our curl mocks do not have a test4.iso, so that one is to test removal of
    # old ISOs during sync
    self.existing_units = [
        Unit(TYPE_ID_ISO,
             {'name': 'test.iso', 'size': 16,
              'checksum': 'f02d5a72cd2d57fa802840a76b44c6c6920a8b8e6b90b20e26c03876275069e0'},
             {}, '/path/test.iso'),
        Unit(TYPE_ID_ISO,
             {'name': 'test2.iso', 'size': 22,
              'checksum': 'c7fbc0e821c0871805a99584c6a384533909f68a6bbe9a2a687d28d9f3b10c16'},
             {}, '/path/test2.iso'),
        Unit(TYPE_ID_ISO,
             {'name': 'test4.iso', 'size': 4, 'checksum': 'sum4'},
             {}, '/path/test4.iso')]
    self.sync_conduit = importer_mocks.get_sync_conduit(
        pkg_dir=self.pkg_dir, existing_units=self.existing_units,
        pulp_units=self.existing_units)

    self.iso_sync_run = ISOSyncRun(self.sync_conduit, self.config)
def test_install_units(self):
    """
    Verify that all available packages in the erratum are installed

    In this test, there are two packages in the erratum, and both are available
    to the consumer. Thus, both should be installed.
    """
    repo_id = "test_repo_id"
    errata_obj = self.get_test_errata_object()
    errata_unit = Unit(TYPE_ID_ERRATA, {"id": errata_obj["id"]}, errata_obj, None)
    existing_units = [errata_unit]
    test_repo = profiler_mocks.get_repo(repo_id)

    # create two RPM units that match what is in the erratum
    rpm_units = []
    rpm_unit_key_1 = self.create_profile_entry("emoticons", 0, "0.1", "2", "x86_64",
                                               "Test Vendor")
    rpm_units.append(Unit(TYPE_ID_RPM, rpm_unit_key_1, {}, None))
    rpm_unit_key_2 = self.create_profile_entry("patb", 0, "0.1", "2", "x86_64",
                                               "Test Vendor")
    rpm_units.append(Unit(TYPE_ID_RPM, rpm_unit_key_2, {}, None))
    existing_units += rpm_units

    conduit = profiler_mocks.get_profiler_conduit(existing_units=existing_units,
                                                  repo_bindings=[test_repo],
                                                  repo_units=rpm_units)
    # install_units takes errata references as {unit_key, type_id} dicts.
    example_errata = {"unit_key": errata_unit.unit_key, "type_id": TYPE_ID_ERRATA}
    prof = YumProfiler()
    translated_units = prof.install_units(self.test_consumer, [example_errata], None, None,
                                          conduit)

    # check repo_id passed to the conduit get_units()
    self.assertEqual(conduit.get_units.call_args[0][0].id, repo_id)
    # check unit association criteria passed to the conduit get_units()
    self.assertEqual(conduit.get_units.call_args_list[0][0][1].type_ids, [TYPE_ID_ERRATA])
    self.assertEqual(conduit.get_units.call_args_list[0][0][1].unit_filters,
                     errata_unit.unit_key)

    # validate translated units: the erratum should expand to its two RPMs
    self.assertEqual(len(translated_units), 2)
    expected = prof._get_rpms_from_errata(errata_unit)
    for u in translated_units:
        rpm_unit_key = u["unit_key"]
        self.assertTrue(rpm_unit_key in expected)
def get_units(self, criteria=None):
    """
    Return two canned puppet module units; the criteria argument is accepted
    for interface compatibility but ignored.
    """
    module_specs = [
        ('valid', '1.1.0', 'jdob'),
        ('good', '2.0.0', 'adob'),
    ]
    return [Unit(constants.TYPE_PUPPET_MODULE,
                 {'name': name, 'version': version, 'author': author}, {}, '')
            for name, version, author in module_specs]
def test_with_busybox(self):
    """
    save_models() should save the docker image unit through the conduit, write
    the full ancestry list to disk, and move the json/layer files into the
    unit's storage directory.
    """
    models = [
        DockerImage(data.busybox_ids[0], data.busybox_ids[1], 1024),
    ]
    dest = tempfile.mkdtemp()
    try:
        # prepare some state
        model_dest = os.path.join(dest, models[0].relative_path)
        unit = Unit(DockerImage.TYPE_ID, models[0].unit_key, models[0].unit_metadata,
                    model_dest)
        self.conduit.init_unit.return_value = unit

        # call the save, letting it write files to disk
        upload.save_models(self.conduit, models, data.busybox_ids, data.busybox_tar_path)

        # assertions!
        self.conduit.save_unit.assert_called_once_with(unit)

        # make sure the ancestry was computed and saved correctly
        ancestry = json.load(open(os.path.join(model_dest, 'ancestry')))
        self.assertEqual(set(ancestry), set(data.busybox_ids))

        # make sure these files were moved into place
        self.assertTrue(os.path.exists(os.path.join(model_dest, 'json')))
        self.assertTrue(os.path.exists(os.path.join(model_dest, 'layer')))
    finally:
        # always clean up the temp dir, even on assertion failure
        shutil.rmtree(dest)
def test_process_main(self, mock_shutil, mock_stat):
    """
    Test that we save properly if everything is ok
    """
    unit_key = {'name': 'foo', 'version': '1.5', 'architecture': 'x86_64'}
    unit_key_hash = sync.get_key_hash(unit_key)
    deb_data = {unit_key_hash: {'file_name': 'foo.deb', 'file_size': '5'}}
    self.step.parent = mock.MagicMock(deb_data=deb_data)
    self.step.parent.step_get_local_units.units_to_download = [unit_key]
    # st_size matches file_size above so the size validation passes
    mock_stat.return_value.st_size = 5
    initialized_unit = Unit(constants.DEB_TYPE_ID, unit_key, {}, 'some/directory')
    save_location = sync.generate_internal_storage_path('foo.deb')
    self.step.conduit.init_unit.return_value = initialized_unit

    self.step.process_main()

    self.step.conduit.init_unit.assert_called_once_with(constants.DEB_TYPE_ID, unit_key,
                                                        {'file_name': 'foo.deb'},
                                                        save_location)
    # The downloaded file should be moved from the working dir into the unit's
    # storage path.
    source = os.path.join(self.working_dir, save_location)
    mock_shutil.assert_called_once_with(source, initialized_unit.storage_path)
def _generate_distribution_unit(self, name, metadata=None):
    """
    Build a distribution Unit named *name*, with an on-disk boot.iso fixture
    created under the working directory.

    :param name: distribution id; also used as the storage directory name
    :type  name: str
    :param metadata: optional extra metadata merged over the generated
                     'files' metadata
    :type  metadata: dict or None
    :return: distribution unit backed by the created storage path
    :rtype:  pulp.plugins.model.Unit
    """
    # A mutable default argument ({}) is shared across calls and could leak
    # mutations between tests; default to None and normalize here instead.
    if metadata is None:
        metadata = {}
    storage_path = os.path.join(self.working_dir, 'content', name)
    if not os.path.exists(storage_path):
        os.makedirs(storage_path)
    unit_key = {"id": name}
    unit_metadata = {"files": [
        {
            "downloadurl": "http://download-01.eng.brq.redhat.com/pub/rhel/released/RHEL-6/6.4/"
                           "Server/x86_64/os/images/boot.iso",
            "item_type": "distribution",
            "savepath": "/var/lib/pulp/working/repos/distro/importers/yum_importer/tmpGn5a2b/"
                        "tmpE7TPuQ/images/boot.iso",
            "checksumtype": "sha256",
            "relativepath": "images/boot.iso",
            "checksum": "929669e1203117f2b6a0d94f963af11db2eafe84f05c42c7e582d285430dc7a4",
            "pkgpath": "/var/lib/pulp/content/distribution/ks-Red Hat Enterprise Linux-Server-"
                       "6.4-x86_64/images",
            "filename": "boot.iso"
        }
    ]}
    unit_metadata.update(metadata)
    # Create the fixture file the metadata refers to.
    self._touch(os.path.join(storage_path, 'images', 'boot.iso'))
    return Unit(TYPE_ID_DISTRO, unit_key, unit_metadata, storage_path)
def setUp(self):
    """
    Load the pickled erratum fixture and build the corresponding Errata Unit.
    """
    path = os.path.join(DATA_DIR, 'erratum.pickle')
    # Use a context manager so the fixture's file handle is closed
    # deterministically instead of leaking until garbage collection.
    with open(path) as fixture:
        self.erratum = pickle.load(fixture)
    unit_key = {'id': self.erratum['id']}
    # Drop mongo-internal fields (leading underscore); 'id' lives in the unit
    # key rather than the metadata.
    metadata = dict((k, v) for k, v in self.erratum.items() if not k.startswith('_'))
    del metadata['id']
    self.unit = Unit(models.Errata.TYPE, unit_key, metadata, '')
def test_rpms_applicable_to_consumer(self):
    """
    _rpms_applicable_to_consumer() should return nothing for an empty RPM list,
    and should report newer erratum RPMs as applicable (with the older installed
    versions keyed by "name arch") for a populated consumer profile.
    """
    # Empty erratum RPM list: nothing applicable, nothing old.
    errata_rpms = []
    prof = YumProfiler()
    applicable_rpms, old_rpms = prof._rpms_applicable_to_consumer(Consumer("test", {}),
                                                                 errata_rpms)
    self.assertEqual(applicable_rpms, [])
    self.assertEqual(old_rpms, {})

    # Get rpm dictionaries embedded in an errata
    errata_obj = self.get_test_errata_object()
    errata_unit = Unit(TYPE_ID_ERRATA, {"id": errata_obj["id"]}, errata_obj, None)
    errata_rpms = prof._get_rpms_from_errata(errata_unit)
    # Test with 2 newer RPMs in the test errata
    # The consumer has already been configured with a profile containing 'emoticons' and
    # 'patb' rpms
    applicable_rpms, old_rpms = prof._rpms_applicable_to_consumer(self.test_consumer,
                                                                  errata_rpms)
    self.assertTrue(applicable_rpms)
    self.assertTrue(old_rpms)
    self.assertEqual(len(applicable_rpms), 2)
    # old_rpms is keyed by "<name> <arch>" and records the installed version.
    self.assertTrue("emoticons x86_64" in old_rpms)
    self.assertEqual("emoticons", old_rpms["emoticons x86_64"]["installed"]["name"])
    self.assertEqual("0.1", old_rpms["emoticons x86_64"]["installed"]["version"])
def test_link_unit(self, mock_link):
    """
    link_unit() should forward the source unit's type id and id, the target's
    type id, and the target's id wrapped in a list.
    """
    # Setup
    source = Unit('t1', {'k': 'v1'}, {'m': 'm'}, 'p')
    source.id = 'from-unit'
    target = Unit('t2', {'k': 'v2'}, {'m': 'm'}, 'p')
    target.id = 'to-unit'

    # Test
    self.mixin.link_unit(source, target)

    # Verify
    self.assertEqual(1, mock_link.call_count)
    positional = mock_link.call_args[0]
    self.assertEqual(positional[0], source.type_id)
    self.assertEqual(positional[1], source.id)
    self.assertEqual(positional[2], target.type_id)
    self.assertEqual(positional[3], [target.id])
def init_unit(type_id, unit_key, metadata, relative_path):
    """
    Build a Unit whose storage path is relative_path anchored under
    working_dir, creating the parent directory if it does not exist yet.
    """
    full_path = os.path.join(working_dir, relative_path)
    parent_dir = os.path.dirname(full_path)
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)
    return Unit(type_id, unit_key, metadata, full_path)
def to_plugin_unit(pulp_unit, unit_type_id, unit_key_fields):
    """
    Parses the raw dictionary of a content unit into its plugin representation.

    :param pulp_unit: raw dictionary of unit metadata
    :type pulp_unit: dict
    :param unit_type_id: unique identifier for the type of unit
    :type unit_type_id: str
    :param unit_key_fields: collection of keys required for the type's unit key
    :type unit_key_fields: list or tuple

    :return: plugin unit representation of the given unit
    :rtype: pulp.plugins.model.Unit
    """
    # Work on a copy so the caller's dictionary is left untouched.
    remaining = dict(pulp_unit)
    # Pull the unit-key fields out; everything left over is metadata.
    unit_key = dict((field, remaining.pop(field)) for field in unit_key_fields)
    storage_path = remaining.pop('_storage_path', None)
    unit_id = remaining.pop('_id', None)
    plugin_unit = Unit(unit_type_id, unit_key, remaining, storage_path)
    plugin_unit.id = unit_id
    return plugin_unit
def side_effect(type_id, key, metadata, rel_path):
    """
    Mimic a conduit init_unit: anchor rel_path under pkg_dir (when both are
    truthy), make sure its parent directory exists, and return the Unit.
    """
    if rel_path and pkg_dir:
        rel_path = os.path.join(pkg_dir, rel_path)
        parent = os.path.dirname(rel_path)
        if not os.path.exists(parent):
            os.makedirs(parent)
    return Unit(type_id, key, metadata, rel_path)
def test_handle_yum_metadata_file(self):
    """
    _handle_yum_metadata_file() should move the uploaded file into place, init
    the unit with a repo-scoped relative path, and save the inited unit.
    """
    # Setup
    unit_key = {'data_type': 'product-id', 'repo_id': 'test-repo'}
    metadata = {'local_path': 'repodata/productid', 'checksum': 'abcdef',
                'checksumtype': 'sha256'}
    config = PluginCallConfiguration({}, {})
    mock_repo = mock.MagicMock()
    mock_conduit = mock.MagicMock()
    inited_unit = Unit(models.YumMetadataFile.TYPE, unit_key, metadata,
                       self.upload_dest_filename)
    mock_conduit.init_unit.return_value = inited_unit

    # Test
    upload._handle_yum_metadata_file(mock_repo, models.YumMetadataFile.TYPE, unit_key,
                                     metadata, self.upload_source_filename, mock_conduit,
                                     config)

    # Verify
    # File was moved correctly
    self.assertTrue(not os.path.exists(self.upload_source_filename))
    self.assertTrue(os.path.exists(self.upload_dest_filename))

    # Conduit calls
    expected_relative_path = 'test-repo/repodata/productid'
    mock_conduit.init_unit.assert_called_once_with(models.YumMetadataFile.TYPE, unit_key,
                                                   metadata, expected_relative_path)
    # NOTE: save_unit.assert_called_once() is NOT a real assertion on older mock
    # releases -- it is an auto-created Mock attribute that silently passes.
    # Assert on call_count instead so this actually verifies the save happened.
    self.assertEqual(1, mock_conduit.save_unit.call_count)
    saved_unit = mock_conduit.save_unit.call_args[0][0]
    self.assertEqual(inited_unit, saved_unit)
def to_plugin_unit(pulp_unit, type_def):
    """
    Parses the raw dictionary of a content unit into its plugin representation.

    @param pulp_unit: raw dictionary of unit metadata
    @type pulp_unit: dict
    @param type_def: Pulp stored definition for the unit type
    @type type_def: pulp.server.db.model.content.ContentType

    @return: plugin unit representation of the given unit
    @rtype: pulp.plugins.model.Unit
    """
    # Work on a copy so the caller's dictionary is left untouched.
    remaining = dict(pulp_unit)
    # Pull out the type's unit-key fields; what's left becomes metadata.
    unit_key = dict((field, remaining.pop(field)) for field in type_def['unit_key'])
    storage_path = remaining.pop('_storage_path', None)
    unit_id = remaining.pop('_id', None)
    plugin_unit = Unit(type_def['id'], unit_key, remaining, storage_path)
    plugin_unit.id = unit_id
    return plugin_unit
def get_repo_units(self, repo_id, content_type_id, additional_unit_fields=None):
    """
    Searches for units in the given repository with given content type
    and returns a plugin unit containing unit id, unit key and any additional
    fields requested.

    :param repo_id: repo id
    :type  repo_id: str
    :param content_type_id: content type id of the units
    :type  content_type_id: str
    :param additional_unit_fields: additional fields from the unit metadata to be added
                                   in the result
    :type  additional_unit_fields: list of str
    :return: list of unit instances
    :rtype:  list of pulp.plugins.model.Unit
    :raises: self.exception_class wrapping any server-side failure
    """
    additional_unit_fields = additional_unit_fields or []
    try:
        unit_key_fields = units_controller.get_unit_key_fields_for_type(content_type_id)

        # Query repo association manager to get all units of given type
        # associated with given repo. Limit data by requesting only the fields
        # that are needed.
        query_manager = managers.repo_unit_association_query_manager()
        unit_fields = list(unit_key_fields) + list(additional_unit_fields)
        criteria = UnitAssociationCriteria(association_fields=['unit_id'],
                                           unit_fields=unit_fields)
        units = query_manager.get_units_by_type(repo_id, content_type_id, criteria)

        # Convert units to plugin units with unit_key and required metadata values for each unit
        all_units = []
        for unit in units:
            unit_key = {}
            metadata = {}
            # pop() the key fields so they don't also appear in metadata
            for k in unit_key_fields:
                unit_key[k] = unit['metadata'].pop(k)
            # Add unit_id and any additional unit fields requested by plugins
            metadata['unit_id'] = unit.pop('unit_id')
            for field in additional_unit_fields:
                # default to None: the field may be absent on some units
                metadata[field] = unit['metadata'].pop(field, None)

            u = Unit(content_type_id, unit_key, metadata, None)
            all_units.append(u)

        return all_units
    # Python 2 syntax; re-raise as the conduit's exception class while
    # preserving the original traceback.
    except Exception, e:
        _logger.exception(_('Exception from server getting units from repo [%s]' % repo_id))
        raise self.exception_class(e), None, sys.exc_info()[2]
def test_associate_from_repo_no_criteria(self):
    """
    associate_from_repo() with no criteria should hand all associated units to
    the importer (units=None) and report the importer's returned units as
    successfully associated.
    """
    # Setup
    source_repo_id = 'source-repo'
    dest_repo_id = 'dest-repo'

    self.repo_manager.create_repo(source_repo_id)
    self.importer_manager.set_importer(source_repo_id, 'mock-importer', {})
    self.repo_manager.create_repo(dest_repo_id)
    self.importer_manager.set_importer(dest_repo_id, 'mock-importer', {})

    self.content_manager.add_content_unit('mock-type', 'unit-1', {'key-1': 'unit-1'})
    self.content_manager.add_content_unit('mock-type', 'unit-2', {'key-1': 'unit-2'})
    self.content_manager.add_content_unit('mock-type', 'unit-3', {'key-1': 'unit-3'})

    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-1')
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-2')
    self.manager.associate_unit_by_id(source_repo_id, 'mock-type', 'unit-3')

    # Run as a known principal so the association is attributed to a user.
    fake_user = User('associate-user', '')
    manager_factory.principal_manager().set_principal(principal=fake_user)

    mock_plugins.MOCK_IMPORTER.import_units.return_value = [
        Unit('mock-type', {'k': 'v'}, {}, '')]

    # Test
    results = self.manager.associate_from_repo(source_repo_id, dest_repo_id)
    associated = results['units_successful']

    # Verify
    self.assertEqual(1, len(associated))
    self.assertEqual(associated[0]['type_id'], 'mock-type')
    self.assertEqual(associated[0]['unit_key'], {'k': 'v'})

    self.assertEqual(1, mock_plugins.MOCK_IMPORTER.import_units.call_count)

    args = mock_plugins.MOCK_IMPORTER.import_units.call_args[0]
    kwargs = mock_plugins.MOCK_IMPORTER.import_units.call_args[1]
    self.assertTrue(isinstance(args[0], Repository))  # repository transfer object
    self.assertEqual(args[0].id, 'source-repo')  # repo importing units from
    self.assertEqual(args[1].id, 'dest-repo')  # repo importing units into
    self.assertEqual(None, kwargs['units'])  # units to import
    self.assertTrue(isinstance(args[3], PluginCallConfiguration))  # config

    conduit = args[2]
    self.assertTrue(isinstance(conduit, ImportUnitConduit))

    # Clean Up: reset the principal so other tests are unaffected
    manager_factory.principal_manager().set_principal(principal=None)
def test_process_item(self, mock_symlink):
    """
    process_main() should symlink the unit's storage path to a file of the same
    name inside the publish working directory.
    """
    publish_step = steps.PublishContentStep(working_dir='/foo/bar')
    deb_unit = Unit(constants.DEB_TYPE_ID, {}, {'file_name': 'apples.deb'},
                    storage_path='/some/random/apples.deb')

    publish_step.process_main(item=deb_unit)

    mock_symlink.assert_called_once_with('/some/random/apples.deb', '/foo/bar/apples.deb')
def test_saves_unit(self, mock_get_multiple):
    """
    process_main() should save exactly one unit built from the single
    available unit's key.
    """
    mock_get_multiple.return_value = [{'foo': 'a'}]
    self.parent.available_units = [{'foo': 'a'}]

    self.step.process_main()

    expected_unit = Unit('fake_unit_type', {'foo': 'a'}, {}, '')
    self.step.conduit.save_unit.assert_called_once_with(expected_unit)
def test_generate_dep_data(self, mock_open):
    """
    _generate_dependency_data() should write one JSON entry per module name to
    the (faked) gdbm database, record each version's dependencies, and close
    the database when done.
    """
    class FakeDB(dict):
        """Fake version of gdbm database"""
        close_called = False

        def close(self):
            self.close_called = True

    mock_open.return_value = FakeDB()
    # Two versions of me/foo plus one me/bar that depends on me/foo.
    units = [
        Unit(constants.TYPE_PUPPET_MODULE,
             {'name': 'foo', 'version': '1.0.3', 'author': 'me'},
             {'dependencies': []}, '/tmp'),
        Unit(constants.TYPE_PUPPET_MODULE,
             {'name': 'foo', 'version': '1.1.0', 'author': 'me'},
             {'dependencies': []}, '/tmp'),
        Unit(constants.TYPE_PUPPET_MODULE,
             {'name': 'bar', 'version': '1.0.0', 'author': 'me'},
             {'dependencies': [{'name': 'me/foo', 'version_requirement': '>= 1.0.0'}]},
             '/tmp'),
    ]

    self.run._generate_dependency_data(units)

    db = mock_open.return_value
    self.assertTrue(db.close_called)
    self.assertTrue('me/foo' in db)
    self.assertTrue('me/bar' in db)
    # me/foo has two versions recorded, me/bar one.
    foo_data = json.loads(db['me/foo'])
    self.assertEqual(len(foo_data), 2)
    bar_data = json.loads(db['me/bar'])
    self.assertEqual(len(bar_data), 1)
    self.assertEqual(bar_data[0]['dependencies'][0]['name'], 'me/foo')
    self.assertEqual(bar_data[0]['dependencies'][0]['version_requirement'], '>= 1.0.0')
def test_rpm(self, mock_copyfile):
    """
    _associate_unit() must return an RPM unit unchanged without copying any
    files on disk.
    """
    rpm_model = model_factory.rpm_models(1)[0]
    rpm_unit = Unit(rpm_model.TYPE, rpm_model.unit_key, rpm_model.metadata, '/')

    # passing "None" ensures that the importer isn't being called
    result = associate._associate_unit('', None, rpm_unit)

    self.assertTrue(result is rpm_unit)
    self.assertEqual(mock_copyfile.call_count, 0)
def init_unit(self, type_id, unit_key, metadata, relative_path):
    """
    Initializes the Pulp representation of a content unit. The conduit will use
    the provided information to generate any unit metadata that it needs to.
    A populated transfer object representation of the unit will be returned
    from this call. The returned unit should be used in subsequent calls to
    this conduit.

    This call makes no changes to the Pulp server. At the end of this call,
    the unit's id field will *not* be populated.

    The unit_key and metadata will be merged as they are saved in Pulp to form
    the full representation of the unit. If values are specified in both
    dictionaries, the unit_key value takes precedence.

    If the importer wants to save the bits for the unit, the relative_path
    value should be used to indicate a unique -- with respect to the type of
    unit -- relative path where it will be saved. Pulp will convert this into
    an absolute path on disk where the unit should actually be saved. The
    absolute path is stored in the returned unit object.

    @param type_id: must correspond to a type definition in Pulp
    @type  type_id: str

    @param unit_key: dictionary of whatever fields are necessary to uniquely
                     identify this unit from others of the same type
    @type  unit_key: dict

    @param metadata: dictionary of key-value pairs to describe the unit
    @type  metadata: dict

    @param relative_path: see above; may be None
    @type  relative_path: str, None

    @return: object representation of the unit, populated by Pulp with both
             provided and derived values
    @rtype:  pulp.plugins.model.Unit

    @raise ImporterConduitException: if the server fails to resolve the
           storage path for relative_path
    """
    try:
        # Generate the storage location
        if relative_path is not None:
            content_query_manager = manager_factory.content_query_manager()
            path = content_query_manager.request_content_unit_file_path(type_id,
                                                                        relative_path)
        else:
            # No bits to store for this unit; leave the storage path unset.
            path = None
        u = Unit(type_id, unit_key, metadata, path)
        return u
    # Python 2 syntax; wrap and re-raise while preserving the traceback.
    except Exception, e:
        msg = _('Exception from server requesting unit filename for relative path [%s]')
        msg = msg % relative_path
        _logger.exception(msg)
        raise ImporterConduitException(e), None, sys.exc_info()[2]
def _generate_category_unit(self, name):
    """
    Create a package-category Unit identified by *name*, stored under the
    working directory.
    """
    category_metadata = dict(id=name,
                             user_visible=True,
                             display_order=0,
                             name=name,
                             description=name + u' – description',
                             grouplist=[])
    return Unit(TYPE_ID_PKG_CATEGORY, {'id': name}, category_metadata,
                os.path.join(self.working_dir, name))
def _make_units(self, rpms):
    """
    Build a Unit for each rpm, store the full list as self.units, and expose
    each unit as self.unit_<i>, where <i> is the rpm's zero-based index.
    """
    self.units = [Unit(rpm.TYPE, rpm.unit_key, rpm.metadata, '') for rpm in rpms]
    for index, unit in enumerate(self.units):
        setattr(self, 'unit_%s' % index, unit)
def _generate_environment_unit(self, name):
    """
    Create a package-environment Unit identified by *name*, stored under the
    working directory.
    """
    environment_metadata = dict(id=name,
                                display_order=0,
                                name=name,
                                description=name + u' – description',
                                grouplist=[],
                                optionlist=[])
    return Unit(TYPE_ID_PKG_ENVIRONMENT, {'id': name}, environment_metadata,
                os.path.join(self.working_dir, name))