def process_main(self):
    """
    Query the upstream v1 source for its available images and tags, then
    record every required unit key (tagged images plus all of their
    ancestors) on the parent step.
    """
    super(GetMetadataStep, self).process_main()
    working_dir = self.get_working_dir()
    _logger.debug(self.description)

    # ask the upstream source which image IDs it can serve
    upstream_image_ids = self.parent.v1_index_repository.get_image_ids()

    # fetch the remote tags and stash them on the parent
    self.parent.v1_tags.update(self.parent.v1_index_repository.get_tags())

    # rewrite abbreviated tag values so each tag maps to a full image ID
    self.expand_tag_abbreviations(upstream_image_ids, self.parent.v1_tags)
    tagged_image_ids = self.parent.v1_tags.values()

    # download the ancestry files, then walk each one so we know the full
    # collection of upstream images that must be obtained
    self.parent.v1_index_repository.get_ancestry(tagged_image_ids)
    required_ids = set(tagged_image_ids)
    for tagged_id in tagged_image_ids:
        ancestors = self.find_and_read_ancestry_file(tagged_id, working_dir)
        required_ids.update(ancestors)

    # build an Image model for every required ID and record them on the parent
    self.parent.v1_available_units.extend(
        models.Image(image_id=image_id) for image_id in required_ids)
def test_list_files(self):
    unit = models.Image()
    unit.set_storage_path()
    expected_names = ('ancestry', 'json', 'layer')
    # each per-image file lives directly under the unit's storage path
    expected = [os.path.join(unit.storage_path, name) for name in expected_names]

    self.assertEqual(list(unit.list_files()), expected)
def test_add_unit_metadata_with_tag(self):
    unit = models.Image(image_id='foo_image', parent_id='foo_parent', size=2048)
    expected_json = json.dumps({'id': 'foo_image'})
    self.context.tags = {'bar': 'foo_image'}
    self.context.redirect_url = 'http://www.pulpproject.org/foo/'

    self.context.add_unit_metadata(unit)

    # exactly one metadata record should be written for the unit
    self.context.metadata_file_handle.write.assert_called_once_with(expected_json)
def test_generate_download_reqs_existing_dir(self, mock_working_dir, mock_v1_check, mock_v2_check):
    mock_working_dir.return_value = tempfile.mkdtemp()
    self.config.override_config[constants.CONFIG_KEY_ENABLE_V1] = True
    step = sync.SyncStep(self.repo, self.conduit, self.config)
    step.v1_step_get_local_units.units_to_download.append(models.Image(image_id='image1'))
    # the per-image destination directory already exists before generation
    os.makedirs(os.path.join(step.working_dir, 'image1'))

    try:
        # generating requests against a pre-existing directory must not raise
        list(step.v1_generate_download_requests())
    finally:
        shutil.rmtree(step.working_dir)
def test_generate_download_reqs_perm_denied(self, mock_working_dir, mock_v1_check, mock_v2_check):
    mock_working_dir.return_value = tempfile.mkdtemp()
    self.config.override_config[constants.CONFIG_KEY_ENABLE_V1] = True
    try:
        step = sync.SyncStep(self.repo, self.conduit, self.config)
        step.v1_step_get_local_units.units_to_download.append(models.Image(image_id='image1'))
        # point at a path we cannot create so directory creation fails
        step.working_dir = '/not/allowed'

        # the permission-denied OSError must bubble up to the caller
        self.assertRaises(OSError, list, step.v1_generate_download_requests())
    finally:
        shutil.rmtree(mock_working_dir.return_value)
def test_generate_download_reqs_creates_dir(self, mock_working_dir, mock_v1_check, mock_v2_check):
    mock_working_dir.return_value = tempfile.mkdtemp()
    self.config.override_config[constants.CONFIG_KEY_ENABLE_V1] = True
    step = sync.SyncStep(self.repo, self.conduit, self.config)
    step.v1_step_get_local_units.units_to_download.append(models.Image(image_id='image1'))

    try:
        list(step.v1_generate_download_requests())

        # generating requests should have created the per-image destination dir
        self.assertTrue(os.path.isdir(os.path.join(step.working_dir, 'image1')))
    finally:
        shutil.rmtree(step.working_dir)
def test_generate_download_reqs_ancestry_exists(self, mock_working_dir, mock_v1_check, mock_v2_check):
    mock_working_dir.return_value = tempfile.mkdtemp()
    self.config.override_config[constants.CONFIG_KEY_ENABLE_V1] = True
    step = sync.SyncStep(self.repo, self.conduit, self.config)
    step.v1_step_get_local_units.units_to_download.append(models.Image(image_id='image1'))
    os.makedirs(os.path.join(step.working_dir, 'image1'))
    # simulate the ancestry file having been downloaded previously
    open(os.path.join(step.working_dir, 'image1/ancestry'), 'w').close()

    try:
        # only json and layer should be requested; ancestry is already on disk
        requests = list(step.v1_generate_download_requests())
        self.assertEqual(2, len(requests))
    finally:
        shutil.rmtree(step.working_dir)
def test_v1_generate_download_requests(self, mock_working_dir, mock_v1_check, mock_v2_check):
    mock_working_dir.return_value = tempfile.mkdtemp()
    self.config.override_config[constants.CONFIG_KEY_ENABLE_V1] = True
    step = sync.SyncStep(self.repo, self.conduit, self.config)
    step.v1_step_get_local_units.units_to_download.append(models.Image(image_id='image1'))

    try:
        generator = step.v1_generate_download_requests()
        # requests are produced lazily: a generator, not a list
        self.assertTrue(inspect.isgenerator(generator))

        requests = list(generator)
        # one request each for ancestry, json, and layer
        self.assertEqual(3, len(requests))
        for request in requests:
            self.assertTrue(isinstance(request, DownloadRequest))
    finally:
        shutil.rmtree(step.working_dir)
def get_models(self, metadata, mask_id=''):
    """
    Given image metadata, returns model instances to represent each layer of the
    image defined by the unit_key.

    :param metadata: a dictionary where keys are image IDs, and values are
                     dictionaries with keys "parent" and "size", containing
                     values for those two attributes as taken from the docker
                     image metadata.
    :type  metadata: dict
    :param mask_id:  The ID of an image that should not be included in the
                     returned models. This image and all of its ancestors
                     will be excluded.
    :type  mask_id:  basestring
    :return:         list of models.Image instances
    :rtype:          list
    """
    images = []
    existing_image_ids = set()

    # walk up the parent chain from each leaf (youngest) image
    leaf_image_ids = tarutils.get_youngest_children(metadata)
    for image_id in leaf_image_ids:
        while image_id:
            if image_id in existing_image_ids:
                # This image was already added while walking a previous leaf's
                # chain. Since parent chains are linear, all of its ancestors
                # were added (or masked) during that earlier walk too, so the
                # rest of this chain can be skipped entirely.
                break
            json_data = metadata[image_id]
            parent_id = json_data.get('parent')
            size = json_data['size']

            existing_image_ids.add(image_id)
            images.append(
                models.Image(image_id=image_id, parent_id=parent_id, size=size))

            if parent_id == mask_id:
                # stop before the masked image; it and its ancestors are excluded
                break
            image_id = parent_id
    return images
def test_generate_download_requests_correct_urls(self, mock_working_dir, mock_v1_check, mock_v2_check):
    mock_working_dir.return_value = tempfile.mkdtemp()
    self.config.override_config[constants.CONFIG_KEY_ENABLE_V1] = True
    step = sync.SyncStep(self.repo, self.conduit, self.config)
    step.v1_step_get_local_units.units_to_download.append(models.Image(image_id='image1'))

    try:
        # collect the URL of every generated request
        urls = [request.url for request in step.v1_generate_download_requests()]

        self.assertIn('http://pulpproject.org/v1/images/image1/ancestry', urls)
        self.assertIn('http://pulpproject.org/v1/images/image1/json', urls)
        self.assertIn('http://pulpproject.org/v1/images/image1/layer', urls)
    finally:
        shutil.rmtree(step.working_dir)
def test_generate_download_requests_correct_destinations(
        self, mock_working_dir, mock_v1_check, mock_v2_check):
    mock_working_dir.return_value = tempfile.mkdtemp()
    self.config.override_config[constants.CONFIG_KEY_ENABLE_V1] = True
    step = sync.SyncStep(self.repo, self.conduit, self.config)
    step.v1_step_get_local_units.units_to_download.append(models.Image(image_id='image1'))

    try:
        # collect the destination path of every generated request
        destinations = [
            request.destination for request in step.v1_generate_download_requests()]

        # each file lands inside the image's own working subdirectory
        image_dir = os.path.join(step.working_dir, 'image1')
        self.assertIn(os.path.join(image_dir, 'ancestry'), destinations)
        self.assertIn(os.path.join(image_dir, 'json'), destinations)
        self.assertIn(os.path.join(image_dir, 'layer'), destinations)
    finally:
        shutil.rmtree(step.working_dir)
def test_unit_key(self):
    image = models.Image(image_id='abc', parent_id='xyz', size=1024)

    # only image_id participates in the unit key
    self.assertEqual({'image_id': 'abc'}, image.unit_key)
def test_init_info(self):
    image = models.Image(image_id='abc', parent_id='xyz', size=1024)

    # every constructor argument is stored on the matching attribute
    self.assertEqual('abc', image.image_id)
    self.assertEqual('xyz', image.parent_id)
    self.assertEqual(1024, image.size)
def test_add_unit_metadata(self):
    unit = models.Image(image_id='foo_image', parent_id='foo_parent', size=2048)
    expected_json = json.dumps({'id': 'foo_image'})

    self.context.add_unit_metadata(unit)

    # exactly one metadata record should be written for the unit
    self.context.metadata_file_handle.write.assert_called_once_with(expected_json)