def test_calls_download_boot_resources(self):
    self.patch(download_resources, "datetime", MockDateTime)
    storage_path = self.make_dir()
    snapshot_path = download_resources.compose_snapshot_path(storage_path)
    cache_path = os.path.join(storage_path, "cache")
    file_store = FileStore(cache_path)
    source = {
        "url": "http://example.com",
        "keyring": self.make_file("keyring"),
    }
    product_mapping = ProductMapping()
    fake = self.patch(download_resources, "download_boot_resources")
    download_resources.download_all_boot_resources(
        sources=[source],
        storage_path=storage_path,
        product_mapping=product_mapping,
        store=file_store,
    )
    self.assertThat(
        fake,
        MockCalledWith(
            source["url"],
            file_store,
            snapshot_path,
            product_mapping,
            keyring_file=source["keyring"],
        ),
    )
def test_returns_files_from_cache(self):
    with tempdir() as cache_dir:
        store = FileStore(cache_dir)
        tar_xz, files = self.make_tar_xz(cache_dir)
        sha256, size = self.get_file_info(tar_xz)
        checksums = {"sha256": sha256}
        with open(tar_xz, "rb") as f:
            content_source = ChecksummingContentSource(f, checksums, size)
            download_resources.extract_archive_tar(
                store,
                os.path.basename(tar_xz),
                sha256,
                checksums,
                size,
                content_source,
            )
            mocked_tar = self.patch(download_resources.tarfile, "open")
            cached_files = download_resources.extract_archive_tar(
                store,
                os.path.basename(tar_xz),
                sha256,
                checksums,
                size,
                content_source,
            )
            self.assertThat(mocked_tar, MockNotCalled())
            for f, info in files.items():
                cached_file = os.path.join(cache_dir, "%s-%s" % (f, sha256))
                expected_cached_file = (cached_file, f)
                self.assertIn(expected_cached_file, cached_files)
def do_sync(charm_conf, status_exchange):
    # NOTE(beisner): the user_agent variable was an unused assignment (lint).
    # It may be worth re-visiting its usage, intent and benefit with the
    # UrlMirrorReader call below at some point.  Leaving it disabled for now,
    # and not assigning it since it is not currently utilized.
    # user_agent = charm_conf.get("user_agent")

    for mirror_info in charm_conf['mirror_list']:
        mirror_url, initial_path = path_from_mirror_url(
            mirror_info['url'], mirror_info['path'])

        log.info("configuring sync for url {}".format(mirror_info))
        smirror = UrlMirrorReader(mirror_url, policy=policy)

        if charm_conf['use_swift']:
            store = SwiftObjectStore(SWIFT_DATA_DIR)
        else:
            # Use the local apache server to serve product streams.
            store = FileStore(prefix=APACHE_DATA_DIR)

        content_id = charm_conf['content_id_template'].format(
            region=charm_conf['region'])

        config = {
            'max_items': mirror_info['max'],
            'modify_hook': charm_conf['modify_hook_scripts'],
            'keep_items': True,
            'content_id': content_id,
            'cloud_name': charm_conf['cloud_name'],
            'item_filters': mirror_info['item_filters'],
            'hypervisor_mapping': charm_conf.get('hypervisor_mapping', False),
        }

        mirror_args = dict(config=config, objectstore=store,
                           name_prefix=charm_conf['name_prefix'])
        mirror_args['custom_properties'] = charm_conf.get(
            'custom_properties', False)

        if SIMPLESTREAMS_HAS_PROGRESS:
            log.info("Calling DryRun mirror to get item list")
            drmirror = glance.ItemInfoDryRunMirror(config=config,
                                                   objectstore=store)
            drmirror.sync(smirror, path=initial_path)
            p = StatusMessageProgressAggregator(drmirror.items,
                                                status_exchange.send_message)
            mirror_args['progress_callback'] = p.progress_callback
        else:
            log.info("Detected simplestreams version without progress"
                     " update support. Only limited feedback available.")

        tmirror = GlanceMirrorWithCustomProperties(**mirror_args)
        log.info("calling GlanceMirror.sync")
        tmirror.sync(smirror, path=initial_path)
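# A minimal sketch of the charm_conf mapping that do_sync() reads above.  The
# keys mirror the lookups in the function body; every value shown here is
# illustrative only and not taken from a real charm deployment.
EXAMPLE_CHARM_CONF = {
    'mirror_list': [{
        'url': 'http://cloud-images.ubuntu.com/releases/',   # placeholder
        'path': 'streams/v1/index.sjson',                    # placeholder
        'max': 1,                      # becomes config['max_items']
        'item_filters': ['release=jammy', 'arch=amd64'],     # placeholder
    }],
    'use_swift': False,                # False -> FileStore(prefix=APACHE_DATA_DIR)
    'content_id_template': 'auto.sync.{region}',
    'region': 'RegionOne',
    'cloud_name': 'mycloud',
    'modify_hook_scripts': None,
    'name_prefix': 'auto-sync/',
    'custom_properties': False,        # optional; do_sync defaults it to False
    'hypervisor_mapping': False,       # optional; do_sync defaults it to False
}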
def test_syncs_repo(self):
    fake_sync = self.patch(download_resources.RepoWriter, 'sync')
    storage_path = self.make_dir()
    snapshot_path = self.make_dir()
    cache_path = os.path.join(storage_path, 'cache')
    file_store = FileStore(cache_path)
    source_url = DEFAULT_IMAGES_URL

    download_resources.download_boot_resources(
        source_url, file_store, snapshot_path, None, None)

    self.assertEqual(1, len(fake_sync.mock_calls))
def download_all_boot_resources(
    sources, storage_path, product_mapping, store=None
):
    """Download the actual boot resources.

    Local copies of boot resources are downloaded into a "cache" directory.
    This is a raw, flat store of resources, with UUID-based filenames called
    "tags."

    In addition, the downloads are hardlinked into a "snapshot directory."
    This directory, named after the date and time that the snapshot was
    initiated, reflects the currently available boot resources in a proper
    directory hierarchy with subdirectories for architectures, releases,
    and so on.

    :param sources: List of dicts describing the Simplestreams sources from
        which we should download.
    :param storage_path: Root storage directory, usually
        `/var/lib/maas/boot-resources`.
    :param product_mapping: A `ProductMapping` describing the resources to
        be downloaded.
    :param store: A `FileStore` instance.  Used only for testing.
    :return: Path to the snapshot directory.
    """
    storage_path = os.path.abspath(storage_path)
    snapshot_path = compose_snapshot_path(storage_path)
    # Use a FileStore as our ObjectStore implementation.  It will write to
    # the cache directory.
    if store is None:
        cache_path = os.path.join(storage_path, "cache")
        store = FileStore(cache_path)
    # XXX jtv 2014-04-11: FileStore now also takes an argument called
    # complete_callback, which can be used for progress reporting.
    for source in sources:
        download_boot_resources(
            source["url"],
            store,
            snapshot_path,
            product_mapping,
            keyring_file=source.get("keyring"),
        )
    return snapshot_path
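# A minimal usage sketch for download_all_boot_resources().  The source URL
# and keyring path are placeholders, and ProductMapping is assumed to be
# importable in this scope; none of these values come from this module.
def _example_download_all():
    sources = [{
        "url": "http://images.maas.io/ephemeral-v3/stable/",   # placeholder
        "keyring": "/usr/share/keyrings/example-keyring.gpg",  # placeholder
    }]
    # Returns the dated snapshot directory under storage_path; downloads are
    # cached in <storage_path>/cache and hardlinked into the snapshot tree.
    snapshot = download_all_boot_resources(
        sources,
        storage_path="/var/lib/maas/boot-resources",
        product_mapping=ProductMapping(),
    )
    return snapshot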
def test_extracts_files(self):
    with tempdir() as cache_dir:
        store = FileStore(cache_dir)
        tar_xz, files = self.make_tar_xz(cache_dir)
        sha256, size = self.get_file_info(tar_xz)
        checksums = {'sha256': sha256}
        with open(tar_xz, 'rb') as f:
            content_source = ChecksummingContentSource(f, checksums, size)
            cached_files = download_resources.extract_archive_tar(
                store, os.path.basename(tar_xz), sha256, checksums, size,
                content_source)
            for f, info in files.items():
                cached_file = os.path.join(
                    cache_dir, '%s-%s' % (f, sha256))
                expected_cached_file = (cached_file, f)
                self.assertIn(expected_cached_file, cached_files)
                self.assertTrue(os.path.exists(cached_file))
                self.assertEqual(info, self.get_file_info(cached_file))
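# Illustration of the cache layout exercised by the two extract_archive_tar
# tests above; the member names and checksum are made up.
#
#   cache_dir/
#       kernel-<sha256>     # each tar member is stored as "<name>-<sha256>"
#       initrd-<sha256>
#
#   return value: [(os.path.join(cache_dir, "kernel-<sha256>"), "kernel"),
#                  (os.path.join(cache_dir, "initrd-<sha256>"), "initrd")]
#
# On a repeat call with the same sha256 the entries are served from the
# cache and tarfile.open() is never reached, as test_returns_files_from_cache
# asserts via MockNotCalled().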