def load_products(self, path=None, content_id=None, remove=True):
    """Load products data for 'content_id', recovering from corrupt JSON.

    Overridden from ObjectStoreMirrorWriter: this copy from trunk works
    around LP: #1511364, which is not fixed in all Ubuntu versions.

    On a JSON decode error the offending data file is deleted (when
    'remove' is true) and the load is retried once with remove=False so
    a second corruption still raises.  A missing data file (ENOENT) is
    tolerated and falls through to the 'path' handling.

    @param path: if given and no content_id data is usable, return {}.
    @param content_id: content id whose products data should be loaded.
    @param remove: whether to delete a corrupt data file and retry.
    @raises TypeError: if neither content_id data nor path is available.
    """
    if content_id:
        try:
            dpath = self.products_data_path(content_id)
            return sutil.load_content(self.source(dpath).read())
        except IOError as e:
            # a missing data file is expected; anything else is fatal
            if e.errno != errno.ENOENT:
                raise
        except JSONDecodeError:
            jsonfile = os.path.join(self.out_d, dpath)
            sys.stderr.write("Decode error in:\n "
                             "content_id=%s\n "
                             "JSON filepath=%s\n" % (content_id, jsonfile))
            # PEP 8: test truthiness, not identity with True; this also
            # honors truthy non-bool values for 'remove'.
            if remove:
                sys.stderr.write("Removing offending file: %s\n" % jsonfile)
                util.del_file(jsonfile)
                sys.stderr.write("Trying to load products again...\n")
                sys.stderr.flush()
                # retry exactly once: remove=False raises on a repeat failure
                return self.load_products(path=path,
                                          content_id=content_id,
                                          remove=False)
            raise
    if path:
        return {}
    raise TypeError("unable to load_products with no path")
def _moditem(src, path, pedigree, modfunc):
    """Modify one item inside the products data stored at 'path'.

    Loads the products tree from the 'src' mirror's object store,
    applies 'modfunc' to the item found at 'pedigree', stores the
    returned item back at that pedigree, and writes the tree out.
    """
    store = src.objectstore
    tree = util.load_content(store.source(path).read())
    # pull out the single item, transform it, and splice it back in
    original = util.products_exdata(tree, pedigree, insert_fieldnames=False)
    util.products_set(tree, modfunc(original), pedigree)
    store.insert_content(path, util.dump_data(tree))
def sync(self, reader, path):
    """Read the json at 'path' via 'reader' and dispatch on its format.

    Supports 'products:1.0' and 'index:1.0'; raises TypeError for
    anything else.
    """
    content, payload = reader.read_json(path)
    data = util.load_content(payload)
    fmt = data.get("format", "UNSPECIFIED")
    # dispatch table instead of an if/elif chain
    handlers = {
        "products:1.0": self.sync_products,
        "index:1.0": self.sync_index,
    }
    handler = handlers.get(fmt)
    if handler is None:
        raise TypeError("Unknown format '%s' in '%s'" % (fmt, path))
    return handler(reader, path, data, content)
def _get_data_content(path, data, content, reader): if content is None and path: _, content = reader.read(path) if isinstance(content, bytes): content = content.decode('utf-8') if data is None and content: data = util.load_content(content) if not data: raise ValueError("Data could not be loaded. " "Path or content is required") return (data, content)
def insert_products(self, path, target, content):
    # Write the products tree 'target' into the object store under its
    # content_id path, then create or update the top-level index to
    # reference it.  No-op when no store is configured.
    if not self.store:
        return
    # work on a copy so the caller's tree is not mutated
    tree = copy.deepcopy(target)
    util.products_prune(tree, preserve_empty_products=True)
    # stop these items from copying up when we call condense
    sticky = ['ftype', 'md5', 'sha256', 'size', 'name', 'id']
    # LP: #1329805. Juju expects these on the item.
    if self.config.get('sticky_endpoint_region', True):
        sticky += ['endpoint', 'region']
    util.products_condense(tree, sticky=sticky)
    tsnow = util.timestamp()
    tree['updated'] = tsnow
    dpath = self._cidpath(tree['content_id'])
    LOG.info("writing data: %s", dpath)
    self.store.insert_content(dpath, util.dump_data(tree))
    # now insert or update an index
    ipath = "streams/v1/index.json"
    try:
        index = util.load_content(self.store.source(ipath).read())
    except IOError as exc:
        if exc.errno != errno.ENOENT:
            raise
        # no index yet (first write): start a fresh one
        index = {
            "index": {}, 'format': 'index:1.0',
            'updated': util.timestamp()
        }
    index['index'][tree['content_id']] = {
        'updated': tsnow,
        'datatype': 'image-ids',
        'clouds': [{
            'region': self.region, 'endpoint': self.auth_url
        }],
        'cloudname': self.cloudname,
        'path': dpath,
        'products': list(tree['products'].keys()),
        'format': tree['format'],
    }
    LOG.info("writing data: %s", ipath)
    self.store.insert_content(ipath, util.dump_data(index))
def query(mirror, max_items=1, filter_list=None, verbosity=0):
    """Query the content trees under 'mirror' with the given filters.

    Returns the first non-empty query result from the known content-id
    paths, or [] when nothing matches or no tree files exist.
    """
    if filter_list is None:
        filter_list = []
    ifilters = filters.get_filters(filter_list)

    def fpath(subpath):
        return os.path.join(mirror, subpath)

    # walk each known content tree in order; first hit wins
    for relpath in VMTEST_CONTENT_ID_PATH_MAP.values():
        treefile = fpath(relpath)
        if not os.path.exists(treefile):
            continue
        found = query_ptree(
            sutil.load_content(util.load_file(treefile)),
            max_num=max_items, ifilters=ifilters, path2url=fpath)
        if found:
            return found
    return []
def load_product_output(output, content_id, fmt="serial_list"):
    """Parse command 'output' into a products dict for 'content_id'.

    'serial_list' format expects one "<product_id> <version>" pair per
    line; 'json' format parses the output as json content.  An unknown
    format yields None.
    """
    if fmt == "json":
        return util.load_content(output)
    if fmt == "serial_list":
        # "line" format just is a list of serials that are present
        working = {'content_id': content_id, 'products': {}}
        products = working['products']
        for line in output.splitlines():
            product_id, version = line.split(None, 1)
            entry = products.setdefault(product_id, {'versions': {}})
            entry['versions'][version] = {}
        return working
    return
def load_products(self, path=None, content_id=None):
    """Load the mirrored products data for 'content_id'.

    Returns the parsed .data/ entry for the content_id when present.
    If it is missing but a 'path' was given, returns {} instead of the
    full index so only actually-mirrored items are reported.
    """
    if content_id:
        try:
            dpath = self.products_data_path(content_id)
            with self.source(dpath) as src:
                return util.load_content(src.read())
        except IOError as err:
            # tolerate only a missing data file
            if err.errno != errno.ENOENT:
                raise
    if path:
        # we possibly have 'path' that we could read, but returning it
        # would claim every item was inserted rather than just the list
        # of items actually mirrored (the .data/ entry was missing) --
        # so report nothing mirrored.
        return {}
    raise TypeError("unable to load_products with no path")
def __init__(self, config):
    """Create a CommandHookMirror.

    'config' is either an already-parsed configuration dict or a raw
    string of content to be parsed; it is validated before use.
    """
    parsed = util.load_content(config) if isinstance(config, str) else config
    check_config(parsed)
    super(CommandHookMirror, self).__init__(config=parsed)
def load_products(self, path):
    """Read the json at 'path' and return the parsed products data."""
    content = self.read_json(path)[1]
    return util.load_content(content)
def load_products(self, path=None, content_id=None):
    """
    Load metadata for all currently uploaded active images in Glance.

    Uses glance as the definitive store, but loads metadata from existing
    simplestreams indexes as well.
    """
    my_cid = self.content_id

    # glance is the definitive store. Any data loaded from the store
    # is secondary.
    store_t = None
    if self.store:
        try:
            path = self._cidpath(my_cid)
            store_t = util.load_content(self.store.source(path).read())
        except IOError as e:
            # a missing store entry is fine; anything else is fatal
            if e.errno != errno.ENOENT:
                raise
    if not store_t:
        store_t = empty_iid_products(my_cid)

    glance_t = empty_iid_products(my_cid)

    images = self.gclient.images.list()
    for image in images:
        # glance v1 wraps image properties; v2 puts them on the image
        if self.glance_api_version == "1":
            image = image.to_dict()
            props = image['properties']
        else:
            props = copy.deepcopy(image)

        # skip images not owned by us, for another content_id, or inactive
        if image['owner'] != self.tenant_id:
            continue
        if props.get('content_id') != my_cid:
            continue
        if image.get('status') != "active":
            # NOTE(review): LOG.warn is a deprecated alias of LOG.warning;
            # left unchanged here.
            LOG.warn("Ignoring inactive image %s with status '%s'" % (
                image['id'], image.get('status')))
            continue

        source_content_id = props.get('source_content_id')
        product = props.get('product_name')
        version = props.get('version_name')
        item = props.get('item_name')
        # pedigree fields are required to place the item in the tree
        if not (version and product and item and source_content_id):
            LOG.warn("%s missing required fields" % image['id'])
            continue

        # get data from the datastore for this item, if it exists
        # and then update that with glance data (just in case different)
        try:
            item_data = util.products_exdata(store_t, (
                product, version, item,
            ), include_top=False, insert_fieldnames=False)
        except KeyError:
            item_data = {}

        # If original simplestreams-metadata is stored on the image,
        # use that as well.
        if 'simplestreams_metadata' in props:
            simplestreams_metadata = json.loads(
                props.get('simplestreams_metadata'))
        else:
            simplestreams_metadata = {}
        item_data.update(simplestreams_metadata)

        # glance's own name/id win over any stored values
        item_data.update({'name': image['name'], 'id': image['id']})
        if 'owner_id' not in item_data:
            item_data['owner_id'] = self.tenant_id

        util.products_set(glance_t, item_data, (
            product, version, item,
        ))

    # stamp every product with this mirror's region and endpoint
    for product in glance_t['products']:
        glance_t['products'][product]['region'] = self.region
        glance_t['products'][product]['endpoint'] = self.auth_url

    return glance_t