def __init__(self, config, objectstore=None, region=None, name_prefix=None,
             progress_callback=None, client=None):
    """Initialize the Glance mirror.

    :param config: mirror configuration dict; must contain 'content_id'.
    :param objectstore: optional objectstore for mirrored data.
    :param region: optional OpenStack region name override.
    :param name_prefix: optional prefix for created image names.
    :param progress_callback: optional callable to report progress.
    :param client: OpenStack client module; defaults to the module-level
        'openstack' helper.
    :raises TypeError: if config lacks a 'content_id'.
    """
    super(GlanceMirror, self).__init__(config=config)

    # Validate required config up front so we fail fast, before doing any
    # keystone/glance network work.
    self.content_id = config.get("content_id")
    if not self.content_id:
        raise TypeError("content_id is required")

    self.item_filters = self.config.get('item_filters', [])
    if not self.item_filters:
        # Default to downloadable disk images on common architectures.
        self.item_filters = ['ftype~(disk1.img|disk.img)',
                             'arch~(x86_64|amd64|i386)']
    self.item_filters = filters.get_filters(self.item_filters)

    self.index_filters = self.config.get('index_filters', [])
    if not self.index_filters:
        self.index_filters = ['datatype=image-downloads']
    self.index_filters = filters.get_filters(self.index_filters)

    self.loaded_content = {}
    self.store = objectstore

    if client is None:
        client = openstack
    self.keystone_creds = client.load_keystone_creds()

    self.name_prefix = name_prefix or ""
    if region is not None:
        self.keystone_creds['region_name'] = region

    self.progress_callback = progress_callback

    conn_info = client.get_service_conn_info('image', **self.keystone_creds)
    self.glance_api_version = conn_info['glance_version']
    self.gclient = get_glanceclient(version=self.glance_api_version,
                                    **conn_info)
    self.tenant_id = conn_info['tenant_id']

    self.region = self.keystone_creds.get('region_name', 'nullregion')
    self.cloudname = config.get("cloud_name", 'nullcloud')
    # cloud-region short name used to namespace mirrored content.
    self.crsn = '-'.join((self.cloudname, self.region,))
    self.auth_url = self.keystone_creds['auth_url']

    self.modify_hook = config.get("modify_hook")
    self.inserts = {}
def query(self, img_filter):
    """Query streams for latest image given a specific filter.

    Args:
        img_filter: array of filters as strings format 'key=value'

    Returns:
        dictionary with latest image information or empty
    """
    def policy(content, path):  # pylint: disable=W0613
        """Validate signed stream content against our keyring."""
        return s_util.read_signed(content, keyring=self.keyring_path)

    mirror_url, mirror_path = s_util.path_from_mirror_url(
        self.mirror_url, None)
    source_mirror = mirrors.UrlMirrorReader(mirror_url, policy=policy)

    mirror_config = {'filters': filters.get_filters(img_filter)}
    self._log.debug('looking for image with the following config:')
    self._log.debug(mirror_config)

    target_mirror = FilterMirror(mirror_config)
    target_mirror.sync(source_mirror, mirror_path)
    return target_mirror.json_entries
def mirror(output_d, source=IMAGE_SRC_URL, mirror_filters=None, max_items=1,
           keyring=KEYRING, verbosity=0):
    """Mirror simplestreams content matching filters into output_d.

    :param output_d: target directory for the mirrored content.
    :param source: simplestreams mirror URL to read from.
    :param mirror_filters: list of filter strings; defaults to a copy of
        ITEM_NAME_FILTERS.
    :param max_items: maximum number of versions to keep per product.
    :param keyring: gpg keyring used to validate signed metadata.
    :param verbosity: verbosity level passed to the target mirror.
    """
    if mirror_filters is None:
        # Copy so later appends can never mutate the module-level default.
        mirror_filters = list(ITEM_NAME_FILTERS)
    filter_list = filters.get_filters(mirror_filters)

    (source_url, initial_path) = sutil.path_from_mirror_url(source, None)

    def policy(content, path):  # pylint: disable=W0613
        # Only 'sjson' indexes are signed; plain json is passed through.
        if initial_path.endswith('sjson'):
            return sutil.read_signed(content, keyring=keyring)
        else:
            return content

    smirror = mirrors.UrlMirrorReader(source_url, policy=policy)

    LOG.debug("summary: \n " + '\n '.join([
        "source: %s" % source_url,
        "path: %s" % initial_path,
        "output: %s" % output_d,
        "filters: %s" % filter_list,
    ]) + '\n')

    mirror_config = {'max_items': max_items, 'filters': filter_list}
    tmirror = CurtinVmTestMirror(config=mirror_config, out_d=output_d,
                                 verbosity=verbosity)
    tmirror.sync(smirror, initial_path)
def test_ObjectFilterMirror_does_item_filter(self):
    # Sync a mirror configured to exclude disk1.img items and verify
    # no filtered-out items reach the store while other items still do.
    source = get_mirror_reader("foocloud")
    mirror_config = {'filters': get_filters(['ftype!=disk1.img'])}
    store = MemoryObjectStore(None)

    target = ObjectFilterMirror(mirror_config, store)
    target.sync(source, "streams/v1/index.json")

    assert not any('disk' in name for name in store.data)
    assert len(store.data) != 0
def images(s):
    """Return the latest daily cloud image entries for this cloud.

    Queries the Ubuntu daily simplestreams index, filters by the cloud's
    content id (and optionally series/region), and returns every entry
    sharing the newest version_name. Network or lookup failures yield an
    empty list.
    """
    center(s.__class__.__name__ + '.images')
    retval = []

    # Get daily streams, change for releases
    (mirror_url, path) = util.path_from_mirror_url(
        'https://cloud-images.ubuntu.com/daily/streams/v1/index.sjson',
        None)
    cdebug(' mirror_url: %s' % mirror_url)
    cdebug(' path: %s' % path)
    smirror = mirrors.UrlMirrorReader(mirror_url)

    # Change the content_id to find results for other clouds or for
    # release images
    fl = ['content_id=com.ubuntu.cloud:daily:%s' % s.cloud]
    if s.series is not None:
        fl.append('release=' + s.series)
    if s.region is not None:
        fl.append('region=' + s.region)
    filter_list = filters.get_filters(fl)
    cdebug(' fl: %s' % fl)

    tmirror = FilterMirror(config={'filters': filter_list})
    try:
        tmirror.sync(smirror, path)
        try:
            # Find the latest version
            for i in tmirror.json_entries:
                # cdebug(i)
                cdebug(i['version_name'])
            versions = sorted(set(
                item['version_name'] for item in tmirror.json_entries))
            cdebug(versions)
            # IndexError on empty results is handled by the except below.
            latest = versions[-1]
            retval.extend(i for i in tmirror.json_entries
                          if i['version_name'] == latest)
        except IndexError:
            pass

        # # Print a list of the regions represented in the filtered list
        # # as an example of extracting a list of unique keys from all items
        # regions = set([item['region'] for item in tmirror.json_entries])
        # regions = sorted(list(regions))
        # print('Regions: %s' % regions)
    except IOError:
        pass

    cleave(s.__class__.__name__ + '.images')
    return retval
def latest_cloud_image(release):
    """Download cloud image of specified release using simplestreams.

    This expects to find only a single image for the release for the
    specific day.

    @param release: string of Ubuntu release image to find
    @return: path to unique image for specified release
    @raises RuntimeError: if zero or multiple images are found
    """
    LOG.info('finding pristine image for %s', (release))
    mirror_url = 'https://cloud-images.ubuntu.com/daily'
    mirror_dir = '/srv/netplan/'
    keyring = '/usr/share/keyrings/ubuntu-cloudimage-keyring.gpg'
    (url, path) = s_util.path_from_mirror_url(mirror_url, None)

    ss_filter = filters.get_filters([
        'arch=%s' % system_architecture(),
        'release=%s' % release,
        'ftype=disk1.img'
    ])
    mirror_config = {
        'filters': ss_filter,
        'keep_items': False,
        'max_items': 1,
        'checksumming_reader': True,
        'item_download': True
    }

    def policy(content, path=None):  # pylint: disable=unused-argument
        """Simplestreams policy function.

        @param content: signed content
        @param path: not used
        @return: policy for simplestreams
        """
        return s_util.read_signed(content, keyring=keyring)

    smirror = mirrors.UrlMirrorReader(url, policy=policy)
    tstore = objectstores.FileStore(mirror_dir)
    tmirror = mirrors.ObjectFilterMirror(config=mirror_config,
                                         objectstore=tstore)
    tmirror.sync(smirror, path)

    search_d = os.path.join(mirror_dir, '**', release, '**', '*.img')
    images = list(glob.iglob(search_d, recursive=True))
    # Distinguish "none found" from "ambiguous match" so failures are
    # actionable (RuntimeError remains compatible with 'except Exception').
    if len(images) < 1:
        raise RuntimeError("No images found under '%s'" % search_d)
    if len(images) > 1:
        raise RuntimeError("Multiple images found in '%s': %s"
                           % (search_d, ' '.join(images)))
    return images[0]
def query(mirror, max_items=1, filter_list=None, verbosity=0):
    """Return the first non-empty query result from a local mirror.

    Walks the known vmtest content paths under 'mirror', queries each
    index file that exists, and returns the first truthy result, or an
    empty list when nothing matches.
    """
    if filter_list is None:
        filter_list = []
    ifilters = filters.get_filters(filter_list)

    def fpath(path):
        return os.path.join(mirror, path)

    for content_path in VMTEST_CONTENT_ID_PATH_MAP.values():
        index_path = fpath(content_path)
        if not os.path.exists(index_path):
            continue
        result = query_ptree(
            sutil.load_content(util.load_file(index_path)),
            max_num=max_items, ifilters=ifilters, path2url=fpath)
        if result:
            return result
    return []
def __init__(self, local_path, config=None, delete=False, item_filters=None,
             product_regex=PRODUCTS_REGEX):
    """Initialize a MAAS mirror writer rooted at local_path.

    :param local_path: directory to mirror into (made absolute).
    :param config: simplestreams mirror config passed to the superclass.
    :param delete: whether items absent from the source may be deleted.
    :param item_filters: extra filter strings (e.g. 'arch~(amd64|i386)')
        applied in addition to the built-in 'ftype=tar.gz' filter.
    :param product_regex: regex selecting which product names to mirror.
    """
    self.local_path = os.path.abspath(local_path)
    self.delete = delete
    # Any user specified filters such as arch~(amd64|i386) are in
    # addition to our selecting only tar.gz files. That's the only type
    # of file we know how to unpack.
    # Copy the caller's list before appending: the previous
    # 'item_filters or []' aliased the argument and mutated it.
    self.item_filters = list(item_filters or [])
    self.item_filters.append('ftype=tar.gz')
    self.item_filters = filters.get_filters(self.item_filters)
    self.product_filters = [
        filters.ItemFilter('product_name~' + product_regex)]
    objectstore = objectstores.FileStore(self._simplestreams_path())
    super(MAASMirrorWriter, self).__init__(config, objectstore)
def get_image(self, img_conf):
    """Get image using specified image configuration.

    @param img_conf: configuration for image
    @return_value: cloud_tests.images instance
    """
    (url, path) = s_util.path_from_mirror_url(img_conf['mirror_url'], None)

    # 'ss_filter' rather than 'filter': don't shadow the builtin.
    ss_filter = filters.get_filters([
        'arch=%s' % c_util.get_dpkg_architecture(),
        'release=%s' % img_conf['release'],
        'ftype=disk1.img',
    ])
    mirror_config = {
        'filters': ss_filter,
        'keep_items': False,
        'max_items': 1,
        'checksumming_reader': True,
        'item_download': True
    }

    def policy(content, path):  # pylint: disable=unused-argument
        return s_util.read_signed(content, keyring=img_conf['keyring'])

    smirror = mirrors.UrlMirrorReader(url, policy=policy)
    tstore = objectstores.FileStore(img_conf['mirror_dir'])
    tmirror = mirrors.ObjectFilterMirror(config=mirror_config,
                                         objectstore=tstore)
    tmirror.sync(smirror, path)

    search_d = os.path.join(img_conf['mirror_dir'], '**',
                            img_conf['release'], '**', '*.img')
    images = list(glob.iglob(search_d, recursive=True))
    if len(images) < 1:
        raise RuntimeError("No images found under '%s'" % search_d)
    if len(images) > 1:
        raise RuntimeError("Multiple images found in '%s': %s"
                           % (search_d, ' '.join(images)))

    return NoCloudKVMImage(self, img_conf, images[0])
def get_image(self, img_conf):
    """Get image using specified image configuration.

    @param img_conf: configuration for image
    @return_value: cloud_tests.images instance
    """
    (url, path) = s_util.path_from_mirror_url(img_conf['mirror_url'], None)

    # 'ss_filter' rather than 'filter': don't shadow the builtin.
    ss_filter = filters.get_filters([
        'arch=%s' % c_util.get_architecture(),
        'release=%s' % img_conf['release'],
        'ftype=disk1.img'
    ])
    mirror_config = {
        'filters': ss_filter,
        'keep_items': False,
        'max_items': 1,
        'checksumming_reader': True,
        'item_download': True
    }

    def policy(content, path):  # pylint: disable=unused-argument
        return s_util.read_signed(content, keyring=img_conf['keyring'])

    smirror = mirrors.UrlMirrorReader(url, policy=policy)
    tstore = objectstores.FileStore(img_conf['mirror_dir'])
    tmirror = mirrors.ObjectFilterMirror(config=mirror_config,
                                         objectstore=tstore)
    tmirror.sync(smirror, path)

    search_d = os.path.join(img_conf['mirror_dir'], '**',
                            img_conf['release'], '**', '*.img')
    images = list(glob.iglob(search_d, recursive=True))
    # Distinguish "none found" from "ambiguous match" for clearer errors
    # (RuntimeError remains compatible with 'except Exception' callers).
    if len(images) < 1:
        raise RuntimeError("No images found under '%s'" % search_d)
    if len(images) > 1:
        raise RuntimeError("Multiple images found in '%s': %s"
                           % (search_d, ' '.join(images)))

    image = nocloud_kvm_image.NoCloudKVMImage(self, img_conf, images[0])
    if img_conf.get('override_templates', False):
        image.update_templates(self.config.get('template_overrides', {}),
                               self.config.get('template_files', {}))
    return image
def get_image(self, img_conf):
    """Get image using specified image configuration.

    @param img_conf: configuration for image
    @return_value: cloud_tests.images instance
    """
    (url, path) = s_util.path_from_mirror_url(img_conf['mirror_url'], None)

    # 'ss_filter' rather than 'filter': don't shadow the builtin.
    ss_filter = filters.get_filters([
        'arch=%s' % c_util.get_architecture(),
        'release=%s' % img_conf['release'],
        'ftype=disk1.img'
    ])
    mirror_config = {
        'filters': ss_filter,
        'keep_items': False,
        'max_items': 1,
        'checksumming_reader': True,
        'item_download': True
    }

    def policy(content, path):  # pylint: disable=unused-argument
        return s_util.read_signed(content, keyring=img_conf['keyring'])

    smirror = mirrors.UrlMirrorReader(url, policy=policy)
    tstore = objectstores.FileStore(img_conf['mirror_dir'])
    tmirror = mirrors.ObjectFilterMirror(config=mirror_config,
                                         objectstore=tstore)
    tmirror.sync(smirror, path)

    search_d = os.path.join(img_conf['mirror_dir'], '**',
                            img_conf['release'], '**', '*.img')
    images = list(glob.iglob(search_d, recursive=True))
    if len(images) < 1:
        raise RuntimeError("No images found under '%s'" % search_d)
    if len(images) > 1:
        raise RuntimeError(
            "Multiple images found in '%s': %s" % (search_d,
                                                   ' '.join(images)))

    return NoCloudKVMImage(self, img_conf, images[0])
def _query_streams(img_conf, img_filter):
    """Query streams for latest image given a specific filter.

    @param img_conf: configuration for image
    @param img_filter: array of filters as strings format 'key=value'
    @return: dictionary with latest image information or empty
    @raises RuntimeError: when no images match the given filter
    """
    def policy(content, path):
        # Verify signed stream content against the configured keyring.
        return s_util.read_signed(content, keyring=img_conf['keyring'])

    (url, path) = s_util.path_from_mirror_url(img_conf['mirror_url'], None)
    smirror = mirrors.UrlMirrorReader(url, policy=policy)
    # max_items=1: only the single latest matching image is wanted.
    config = {'max_items': 1, 'filters': filters.get_filters(img_filter)}
    tmirror = FilterMirror(config)
    tmirror.sync(smirror, path)
    try:
        return tmirror.json_entries[0]
    except IndexError:
        raise RuntimeError('no images found with filter: %s' % img_filter)