def test_AddUnits_error_missing_layer(self, _repo_controller, _Manifest_save, _Blob_save):
    """
    AddUnits must raise PulpCodedValidationException (DKR1018) when a blob
    listed in the units to download is not present in the uploaded image.
    """
    # This is where we will untar the image
    step_work_dir = os.path.join(self.work_dir, "working_dir")
    os.makedirs(step_work_dir)
    img, layers = self._create_image()
    manifest_data = dict(
        layers=[dict(digest=x['digest'], mediaType="ignored") for x in layers],
        config=dict(digest="abc"),
        schemaVersion=2)
    units = [
        models.Manifest.from_json(json.dumps(manifest_data), digest="012"),
    ]
    units.extend(
        models.Blob(digest="sha256:%s" % x['digest']) for x in layers)
    # This layer doesn't exist in the image tarball
    units.append(models.Blob(digest="sha256:this-is-missing"))
    parent = mock.MagicMock()
    parent.configure_mock(file_path=img, parent=None)
    parent.v2_step_get_local_units.units_to_download = units
    step = upload.AddUnits(step_type=constants.UPLOAD_STEP_SAVE,
                           working_dir=step_work_dir)
    step.parent = parent
    with self.assertRaises(upload.PulpCodedValidationException) as ctx:
        step.process_lifecycle()
    # assertEquals is a deprecated alias; use assertEqual
    self.assertEqual("DKR1018", ctx.exception.error_code.code)
    self.assertEqual(
        "Layer this-is-missing.tar is not present in the image",
        str(ctx.exception))
def test_AddUnits__mf_exists(self, _Manifest_objects, _repo_controller, _Manifest_save, _Blob_save):
    """
    When nothing needs downloading, AddUnits must look up the existing
    Manifest and record it as the parent's uploaded_unit.
    """
    # This is where we will untar the image
    step_work_dir = os.path.join(self.work_dir, "working_dir")
    os.makedirs(step_work_dir)
    img, layers = self._create_image()
    manifest_data = dict(
        layers=[dict(digest=x['digest'], mediaType="ignored") for x in layers],
        config=dict(digest="abc"),
        schemaVersion=2)
    units = [
        models.Manifest.from_json(json.dumps(manifest_data), digest="012"),
    ]
    units.extend(
        models.Blob(digest="sha256:%s" % x['digest']) for x in layers)
    parent = mock.MagicMock(file_path=img, uploaded_unit=None)
    parent.v2_step_get_local_units.units_to_download = []
    parent.available_units = units
    step = upload.AddUnits(step_type=constants.UPLOAD_STEP_SAVE,
                           working_dir=step_work_dir)
    step.parent = parent
    step.process_lifecycle()
    # Make sure a manifest was looked up and added in the parent's
    # uploaded_unit (assertEquals is deprecated; use assertEqual)
    self.assertEqual(_Manifest_objects.get.return_value, parent.uploaded_unit)
def test_generate_download_requests(self, _working_directory_path):
    """
    Assert correct operation of the generate_download_requests() method.
    """
    _working_directory_path.return_value = self.working_dir
    repo = mock.MagicMock()
    conduit = mock.MagicMock()
    config = plugin_config.PluginCallConfiguration(
        {},
        {'feed': 'https://registry.example.com',
         'upstream_name': 'busybox',
         importer_constants.KEY_MAX_DOWNLOADS: 25})
    step = sync.SyncStep(repo, conduit, config)
    expected_digests = ('cool', 'stuff')
    step.step_get_local_blobs.units_to_download = [
        models.Blob(digest=d) for d in expected_digests]

    requests = list(step.generate_download_requests())

    self.assertEqual(len(requests), 2)
    # One request per blob, fetched from the registry's blob endpoint and
    # written to the working directory under the digest name.
    for request, digest in zip(requests, expected_digests):
        self.assertEqual(
            request.url,
            'https://registry.example.com/v2/busybox/blobs/%s' % digest)
        self.assertEqual(request.destination,
                         os.path.join(self.working_dir, digest))
        self.assertEqual(request.data, None)
        self.assertEqual(request.headers, None)
def process_main(self):
    """
    Determine which manifests and blobs are available upstream, get the
    upstream tags, and save a list of available unit keys and manifests
    on the SyncStep.
    """
    super(DownloadManifestsStep, self).process_main()
    _logger.debug(self.description)
    available_tags = self.parent.index_repository.get_tags()
    # This will be a set of Blob digests. The set is used because they can be repeated and we
    # only want to download each layer once.
    available_blobs = set()
    # Progress is tracked per tag, not per blob.
    self.total_units = len(available_tags)
    man_list = 'application/vnd.docker.distribution.manifest.list.v2+json'
    for tag in available_tags:
        # get_manifest() may return several (manifest, digest, content_type)
        # triples per tag (e.g. multiple schema versions).
        manifests = self.parent.index_repository.get_manifest(tag)
        for manifest in manifests:
            manifest, digest, content_type = manifest
            if content_type == man_list:
                self._process_manifest_list(manifest, digest, available_blobs, tag)
            else:
                has_foreign_layer = self._process_manifest(
                    manifest, digest, available_blobs, tag)
                if has_foreign_layer:
                    # we don't want to process schema1 manifest with foreign layers;
                    # stop looking at further manifests for this tag.
                    break
    # Update the available units with the Manifests and Blobs we learned about
    available_blobs = [models.Blob(digest=d) for d in available_blobs]
    self.parent.available_blobs.extend(available_blobs)
def test_process_main_new_blobs(self, associate_single_unit):
    """
    Test process_main() when there are new Blobs that were downloaded.
    """
    step = sync.SaveUnitsStep()
    digests = (
        'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef',
        'sha256:cc8567d70002e957612902a8e985ea129d831ebe04057d88fb644857caa45d11')
    parent = mock.MagicMock()
    parent.get_working_dir.return_value = '/some/path'
    parent.get_repo.return_value = mock.MagicMock()
    parent.step_get_local_manifests.units_to_download = []
    parent.step_get_local_blobs.units_to_download = [
        models.Blob(digest=d) for d in digests]
    step.parent = parent

    for unit in list(step.get_iterator()):
        unit.save_and_import_content = mock.MagicMock()
        step.process_main(item=unit)
        # Content is imported from <working_dir>/<digest> ...
        expected_path = os.path.join('/some/path', unit.digest)
        unit.save_and_import_content.assert_called_once_with(expected_path)
        # ... and the unit is associated with the step's repository.
        self.assertEqual(associate_single_unit.mock_calls[-1][1][0],
                         step.parent.get_repo.return_value.repo_obj)
        self.assertEqual(associate_single_unit.mock_calls[-1][1][1], unit)
def test_unit_key(self):
    """
    Assert correct behavior from the unit_key() method.
    """
    expected_digest = 'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef'
    blob = models.Blob(digest=expected_digest)
    # The unit key for a Blob is exactly its digest.
    self.assertEqual({'digest': expected_digest}, blob.unit_key)
def test_AddUnits(self, _repo_controller, _Manifest_save, _Blob_save):
    """
    Happy path: AddUnits extracts uncompressed blobs from the image,
    saves the manifest and blobs, associates every unit with the repo,
    and records the manifest as the parent's uploaded_unit.
    """
    # This is where we will untar the image
    step_work_dir = os.path.join(self.work_dir, "working_dir")
    os.makedirs(step_work_dir)
    img, layers = self._create_image()
    manifest_data = dict(
        layers=[dict(digest=x['digest'], mediaType="ignored") for x in layers],
        config=dict(digest="abc"),
        schemaVersion=2)
    units = [
        models.Manifest.from_json(json.dumps(manifest_data), digest="012"),
    ]
    units.extend(
        models.Blob(digest="sha256:%s" % x['digest']) for x in layers)
    parent = mock.MagicMock(file_path=img, parent=None, uploaded_unit=None)
    parent.v2_step_get_local_units.units_to_download = units
    step = upload.AddUnits(step_type=constants.UPLOAD_STEP_SAVE,
                           working_dir=step_work_dir)
    step.parent = parent
    step.process_lifecycle()
    dst_blobs = []
    # Make sure the blobs were created, and not compressed.
    # (The index from enumerate() was unused; close the file handle via
    # a context manager instead of leaking it to the GC.)
    for layer in layers:
        dst = os.path.join(step_work_dir, "sha256:%s" % layer['digest'])
        with open(dst) as blob_file:
            self.assertEqual(layer['content'], blob_file.read())
        dst_blobs.append(dst)
    # Make sure we called save_and_import_content
    self.assertEqual([mock.call(x) for x in dst_blobs],
                     _Blob_save.call_args_list)
    _Manifest_save.assert_called_once_with(
        os.path.join(step_work_dir, "012"))
    # Make sure associate_single_unit got called
    repo_obj = parent.get_repo.return_value.repo_obj
    self.assertEqual(
        [mock.call(repo_obj, x) for x in units],
        _repo_controller.associate_single_unit.call_args_list)
    self.assertEqual(units[0], parent.uploaded_unit)
def get_models(self, manifest):
    """
    Given an image manifest, returns model instances to represent each blob
    of the image defined by the unit_key.

    :param manifest: An initialized Manifest object
    :type  manifest: pulp_docker.plugins.models.Manifest
    :return: list of models.Blob instances
    :rtype:  list
    """
    # A set deduplicates layers that appear more than once in the manifest.
    # Foreign layers are skipped: they are not hosted by this registry.
    available_blobs = {
        layer.blob_sum for layer in manifest.fs_layers
        if layer.layer_type != constants.FOREIGN_LAYER
    }
    # Schema2 manifests also carry a config layer blob.
    if manifest.config_layer:
        available_blobs.add(manifest.config_layer)
    return [models.Blob(digest=d) for d in available_blobs]
def test_process_main_new_blobs_and_manifests(self, associate_single_unit):
    """
    Test process_main() when there are new Blobs and one Manifest that were
    downloaded.
    """
    working_dir = '/working/dir/'
    step = sync.SaveUnitsStep()
    # Simulate two newly downloaded blobs
    blob_digests = (
        'sha256:5f70bf18a086007016e948b04aed3b82103a36bea41755b6cddfaf10ace3c6ef',
        'sha256:cc8567d70002e957612902a8e985ea129d831ebe04057d88fb644857caa45d11')
    step.parent = mock.MagicMock()
    step.parent.get_working_dir.return_value = working_dir
    step.parent.get_repo.return_value = mock.MagicMock()
    step.parent.step_get_local_blobs.units_to_download = [
        models.Blob(digest=d) for d in blob_digests]
    # Simulate one newly downloaded manifest, loaded from test data
    manifest_digest = 'sha256:a001e892f3ba0685184486b08cda99bf81f551513f4b56e72954a1d4404195b1'
    manifest_path = os.path.join(TEST_DATA_PATH, 'manifest_repeated_layers.json')
    with open(manifest_path) as manifest_file:
        manifest = models.Manifest.from_json(manifest_file.read(), manifest_digest)
    step.parent.step_get_local_metadata.units_to_download = [manifest]

    for unit in list(step.get_iterator()):
        unit.save_and_import_content = mock.MagicMock()
        step.process_main(item=unit)
        unit.save_and_import_content.assert_called_once_with(
            os.path.join(working_dir, unit.digest))
        self.assertEqual(associate_single_unit.mock_calls[-1][1][0],
                         step.parent.get_repo.return_value.repo_obj)
        self.assertEqual(associate_single_unit.mock_calls[-1][1][1], unit)
def _Blob(self, digest):
    """
    Create an empty file named after the digest in the content directory and
    return a Blob model whose storage path points at that file.

    :param digest: the blob digest, used as both unit key and file name
    :type  digest: basestring
    :return: a Blob with _storage_path set to the created file
    :rtype:  pulp_docker.plugins.models.Blob
    """
    fname = os.path.join(self.content_dir, digest)
    # Close the handle deterministically; the original bare open() leaked
    # the file object to the garbage collector (ResourceWarning).
    with open(fname, "w"):
        pass
    blob = models.Blob(digest=digest)
    blob._storage_path = fname
    return blob