def test_with_busybox(self):
    """Save a busybox image model and verify the unit, ancestry file, and layer files.

    Fix: the ancestry file was opened via ``json.load(open(...))`` with no
    close, leaking the file handle; it is now read inside a ``with`` block.
    """
    models = [
        DockerImage(data.busybox_ids[0], data.busybox_ids[1], 1024),
    ]
    dest = tempfile.mkdtemp()
    try:
        # prepare some state
        model_dest = os.path.join(dest, models[0].relative_path)
        unit = Unit(DockerImage.TYPE_ID, models[0].unit_key,
                    models[0].unit_metadata, model_dest)
        self.conduit.init_unit.return_value = unit

        # call the save, letting it write files to disk
        upload.save_models(self.conduit, models, data.busybox_ids,
                           data.busybox_tar_path)

        # assertions!
        self.conduit.save_unit.assert_called_once_with(unit)

        # make sure the ancestry was computed and saved correctly;
        # the context manager closes the file deterministically
        with open(os.path.join(model_dest, 'ancestry')) as ancestry_file:
            ancestry = json.load(ancestry_file)
        self.assertEqual(set(ancestry), set(data.busybox_ids))

        # make sure these files were moved into place
        self.assertTrue(os.path.exists(os.path.join(model_dest, 'json')))
        self.assertTrue(os.path.exists(os.path.join(model_dest, 'layer')))
    finally:
        shutil.rmtree(dest)
def test_with_busybox(self):
    """Save a busybox Image model and verify the unit, ancestry file, and layer files.

    Fix: the ancestry file was opened via ``json.load(open(...))`` with no
    close, leaking the file handle; it is now read inside a ``with`` block.
    """
    images = [
        models.Image(data.busybox_ids[0], data.busybox_ids[1], 1024),
    ]
    dest = tempfile.mkdtemp()
    try:
        # prepare some state
        image_dest = os.path.join(dest, images[0].relative_path)
        unit = Unit(models.Image.TYPE_ID, images[0].unit_key,
                    images[0].unit_metadata, image_dest)
        self.conduit.init_unit.return_value = unit

        # call the save, letting it write files to disk
        upload.save_models(self.conduit, images, data.busybox_ids,
                           data.busybox_tar_path)

        # assertions!
        self.conduit.save_unit.assert_called_once_with(unit)

        # make sure the ancestry was computed and saved correctly;
        # the context manager closes the file deterministically
        with open(os.path.join(image_dest, 'ancestry')) as ancestry_file:
            ancestry = json.load(ancestry_file)
        self.assertEqual(set(ancestry), set(data.busybox_ids))

        # make sure these files were moved into place
        self.assertTrue(os.path.exists(os.path.join(image_dest, 'json')))
        self.assertTrue(os.path.exists(os.path.join(image_dest, 'layer')))
    finally:
        shutil.rmtree(dest)
def test_path_exists(self, mock_exists):
    """Even when the destination path exists, the unit is initialized and saved once."""
    image = models.Image('abc123', 'xyz789', 1024)

    upload.save_models(self.conduit, [image], (image.image_id,),
                       data.busybox_tar_path)

    # exactly one unit saved: the one produced by init_unit
    self.assertEqual(self.conduit.save_unit.call_count, 1)
    self.conduit.init_unit.assert_called_once_with(
        constants.IMAGE_TYPE_ID, image.unit_key,
        image.unit_metadata, image.relative_path)
    self.conduit.save_unit.assert_called_once_with(
        self.conduit.init_unit.return_value)
def test_path_exists(self, mock_exists):
    """Even when the destination path exists, the unit is initialized and saved once."""
    image = DockerImage('abc123', 'xyz789', 1024)

    upload.save_models(self.conduit, [image], (image.image_id,),
                       data.busybox_tar_path)

    # exactly one unit saved: the one produced by init_unit
    self.assertEqual(self.conduit.save_unit.call_count, 1)
    self.conduit.init_unit.assert_called_once_with(
        constants.IMAGE_TYPE_ID, image.unit_key,
        image.unit_metadata, image.relative_path)
    self.conduit.save_unit.assert_called_once_with(
        self.conduit.init_unit.return_value)
def upload_unit(self, repo, type_id, unit_key, metadata, file_path, conduit, config):
    """
    Upload a docker image. The file should be the product of "docker save".
    This will import all images in that tarfile into the specified
    repository, each as an individual unit. This will also update the
    repo's tags to reflect the tags present in the tarfile.

    The following is copied from the superclass.

    :param repo:      metadata describing the repository
    :type  repo:      pulp.plugins.model.Repository
    :param type_id:   type of unit being uploaded
    :type  type_id:   str
    :param unit_key:  identifier for the unit, specified by the user
    :type  unit_key:  dict
    :param metadata:  any user-specified metadata for the unit
    :type  metadata:  dict
    :param file_path: path on the Pulp server's filesystem to the temporary
                      location of the uploaded file; may be None in the event
                      that a unit is comprised entirely of metadata and has no
                      bits associated
    :type  file_path: str
    :param conduit:   provides access to relevant Pulp functionality
    :type  conduit:   pulp.plugins.conduits.unit_add.UnitAddConduit
    :param config:    plugin configuration for the repository
    :type  config:    pulp.plugins.config.PluginCallConfiguration

    :return: A dictionary describing the success or failure of the upload.
             It must contain the following keys:
               'success_flag': bool. Indicates whether the upload was
                               successful
               'summary':      json-serializable object, providing summary
               'details':      json-serializable object, providing details
    :rtype:  dict
    """
    # The tarball is the authoritative source of image metadata; the
    # user-supplied ``metadata`` argument is deliberately not used.
    # Renamed the local so the parameter is no longer silently clobbered.
    tar_metadata = tarutils.get_metadata(file_path)

    # turn that metadata into a collection of image models; the local was
    # renamed from ``models`` to avoid shadowing the models module used
    # elsewhere in this plugin
    mask_id = config.get(constants.CONFIG_KEY_MASK_ID)
    image_models = upload.get_models(tar_metadata, mask_id)
    ancestry = tarutils.get_ancestry(image_models[0].image_id, tar_metadata)

    # save those models as units in pulp
    upload.save_models(conduit, image_models, ancestry, file_path)
    upload.update_tags(repo.id, file_path)

    # Fix: the docstring's contract requires a result dict, but the method
    # previously returned None implicitly. Reaching this point means every
    # step above completed without raising.
    return {'success_flag': True, 'summary': '', 'details': {}}