def get_metadata(self):
    '''Extract image metadata from the StratusLab marketplace XML answer.

    Populates and returns self.data (including the 'checksums' sub-dict),
    or None when no XML object was parsed.
    '''
    if not self.xml_obj:
        vprint('No XML object')
        return None
    # Namespace prefix -> URL map used for all namespaced find() calls
    nsp = StratusLabNS._NS_TO_URL_PREFIXES
    ret = self.data
    root = self.xml_obj.getroot()
    # StratusLab xml metadata files are not consistent:
    # if downloaded through the website XML button or directly
    # through the url, there's an additionnal "<metadata>" root tag
    # SL_URI_BASE: https://marketplace.stratuslab.eu/marketplace/metadata
    # Manually crafted URI
    #   <SL_URI_BASE>/LHfKVPoHcv4oMirHU0KuOQc-TvI?media=xml
    # Button URI:
    #   <SL_URI_BASE>/LHfKVPoHcv4oMirHU0KuOQc-TvI/<ENDORSER>/<DATE>?media=xml
    if root.tag == 'metadata':
        rdf = root.find('rdf:RDF', nsp)
    else:
        rdf = root
    desc = rdf.find('rdf:Description', nsp)
    # Each <slreq:checksum> carries an algorithm name and its digest value
    for cksum in desc.findall('slreq:checksum', nsp):
        algo = cksum.find('slreq:algorithm', nsp)
        val = cksum.find('slreq:value', nsp)
        ret['checksums'][sl_to_hashlib(algo.text)] = val.text
    # Copy the remaining simple metadata fields ('algorithm' was already
    # handled above through the checksum elements)
    for key, val in StratusLabNS._RETKEY_TO_NS_PREFIXES.items():
        if key == 'algorithm':
            continue
        mdkey = val + ':' + key
        node = desc.find(mdkey, nsp)
        if node is not None:
            ret[key] = node.text
    return ret
def get_vmlist(vmlist):
    '''Get list of StratusLab ID of images to download

    vmlist is a list of files containing VM image marketplace IDs.
    Returns a set of unique, non-blank, non-commented IDs.
    '''
    ret = set()
    for img_list_fn in vmlist:
        # Text mode: lines are compared against str markers ('#', ' ') below
        with open(img_list_fn, 'r') as vmlist_f:
            # enumerate from 1 so warnings report human (1-based) line numbers
            for lineno, line in enumerate(vmlist_f, 1):
                stripl = line.strip()
                # Ignore blank lines
                if not stripl:
                    continue
                # Ignore commented lines
                if stripl.startswith('#'):
                    continue
                if stripl in ret:
                    vprint('Warning @ %s:%d: ignoring duplicated image "%s".'
                           % (img_list_fn, lineno, stripl))
                    continue
                if ' ' in stripl:
                    vprint('Warning @ %s:%d: ignoring line containing '
                           'whitespace:\n%s' % (img_list_fn, lineno, stripl))
                    continue
                # Assume one valid image ID per line
                ret.add(stripl)
    return ret
def zip_opener(fname, _):
    '''Open a .zip file and return a file-like object for it

    Only the first archive member is opened; a warning is printed when
    the archive holds more than one file.  The second parameter exists
    for opener-signature compatibility and is ignored.
    '''
    vprint('Opening zip archive:' + fname)
    archive = zipfile.ZipFile(fname, 'r')
    members = archive.namelist()
    if len(members) > 1:
        vprint('Archive contains more than one file: ' + fname)
    return archive.open(members[0])
def doit(self, delete=False):
    '''Decompress the file's data, in self.block_size chunks

    Reads self.fin_name through self.opener and streams the decompressed
    bytes into self.fout_name.  Returns a (success, output_file_name)
    tuple.  When *delete* is true and everything succeeded, the input
    file is removed.
    '''
    ret = True
    try:
        with self.opener(self.fin_name, 'rb') as fin:
            # delout: remember to remove a partially-written output file
            delout = False
            try:
                with open(self.fout_name, 'wb') as fout:
                    try:
                        utils.block_read_filedesc(fin, fout.write, self.block_size)
                    except IOError as exc:
                        delout = True
                        ret = False
                        # Only "Not a gzipped file" is tolerated silently;
                        # any other IOError is re-raised to the outer handler
                        if exc not in utils.Exceptions(IOError('Not a gzipped file')):
                            raise exc
                if delout:
                    vprint('Error happened: deleting output file')
                    os.remove(self.fout_name)
            except IOError:
                ret = False
    except Exception:
        # Opener failure (bad archive, missing file, ...): report failure
        ret = False
    if ret and delete:
        os.remove(self.fin_name)
    return ret, self.fout_name
def do_argparse(sys_argv):
    '''Parse CLI options for the multi-checksum tool.

    Returns the parsed argparse namespace; enables verbose mode as a
    side effect when -v/--verbose was given.
    '''
    desc_help = textwrap.dedent('''
        Emulate md5sum, sha1sum, etc... as if all were run in parallel...
    ''')
    parser = argparse.ArgumentParser(description=desc_help,
                                     formatter_class=utils.AlmostRawFormatter)
    parser.add_argument('-d', '--directory', default='.',
                        help='Directory where checksums files are output')
    parser.add_argument('-f', '--force', action='store_true',
                        help='Overwrite checksum files')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Display additional information')
    # Fixed typo in help text: "comute" -> "compute"
    parser.add_argument(dest='files', nargs='+',
                        help='files to compute checksums of')
    args = parser.parse_args(sys_argv)
    if args.verbose:
        utils.set_verbose(True)
        vprint('verbose mode')
    return args
def do_argparse(sys_argv):
    '''Handle CLI options
    '''
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument('-v', '--verbose', action='store_true',
                     help='Display additional information')
    cli.add_argument('-l', '--vmlist', default=[], action='append',
                     help='File containing a list of endorsed VM image '
                     'marketplace IDs to upload in glance')
    cli.add_argument('-u', '--url', default=_DEFAULT_SL_MP_URL,
                     help='Market place base URL (default should be OK)')
    parsed = cli.parse_args(sys_argv)
    if parsed.verbose:
        utils.set_verbose(True)
        vprint('verbose mode')
    return parsed
def glance_delete_ids(ids, quiet=False):
    '''Delete every glance image whose ID appears in *ids*.

    Empty IDs are reported and counted as failures.  Returns True only
    when all deletions succeeded.
    '''
    ret = True
    for image_id in ids:
        # Guard clause: an empty ID can never be deleted
        if not image_id:
            vprint('Error: attempting to delete image with empty ID')
            ret = False
            continue
        ret &= glance_delete(image_id, quiet=quiet)
    return ret
def main(args=sys.argv[1:]):
    '''Decompress all files given as CLI arguments'''
    utils.set_verbose(True)
    vprint('verbose mode')
    for archive_name in args:
        vprint('Decompressing archive: ' + archive_name)
        Decompressor(archive_name).doit()
    return True
def __init__(self, filename):
    '''Parse the StratusLab XML metadata file *filename*.

    On any parse failure self.xml_obj is None (the previous code left the
    attribute unset when a non-ParseError exception escaped et.parse(),
    which would break later self.xml_obj checks).
    '''
    super(MetaStratusLabXml, self).__init__()
    # Ensure the attribute always exists, even when parsing fails below
    self.xml_obj = None
    try:
        self.xml_obj = et.parse(filename)
    except et.ParseError:
        vprint('XML parse error')
    except Exception:
        # Narrowed from a bare "except:" which also swallowed
        # SystemExit / KeyboardInterrupt
        vprint('Parsed OK, but still no XML object from:' + str(filename))
    self.data = {'checksums': {}}
def upload_image(mpid, name, meta_file):
    '''Upload new image into glance registry, using metadata file content
    '''
    vprint("Uploading new image: %s (%s)" % (mpid, name))
    result = glancing.main(['-v', '-n', name, meta_file])
    # Drop the glance image cache so the next lookup sees the new image
    # TODO: maybe just add the new one
    global _GLANCE_IMAGES
    _GLANCE_IMAGES = None
    return result
def validity_check(state):
    '''Return True when *state* satisfies all qualitative-model constraints.

    The original body repeated the same two (magnitude, derivative) checks
    three times each (once per "ASSUMPTION" comment for Inflow, Volume and
    Outflow) inside a loop that already visits every quantity; the
    duplicates are collapsed into a single pair of checks with identical
    behavior.
    '''
    # Volume and Outflow magnitudes must agree
    if not state.quantities['Volume'].is_equal(state.quantities['Outflow']):
        return False
    if not intra_edge_cases(state):
        return False
    for s in state.quantities:
        magnitude = state.quantities[s].magnitude.val
        derivative = state.quantities[s].derivative.val
        # ASSUMPTION: no quantity can be ZERO and still decrease, nor be
        # MAX and still increase (applies to Inflow, Volume and Outflow)
        if ((magnitude == MagnitudeValues.ZERO and derivative == DerivativeValues.MIN) or
                (magnitude == MagnitudeValues.MAX and derivative == DerivativeValues.MAX)):
            tprint('{}({}, {}) is an invalid state'.format(
                s, magnitude, derivative))
            return False
    # Log the full (quantity, magnitude, derivative) triple list
    tmp = 'STATE {} - '.format(state.id)
    for s in state.quantities:
        magnitude = state.quantities[s].magnitude.val
        derivative = state.quantities[s].derivative.val
        tmp += '{}({}, {}), '.format(s, magnitude, derivative)
    vprint('{} is a valid state'.format(tmp))
    return True
def get_meta_file(mpid, metadata_url_base):
    '''Retrieve image metadata from StratusLab marketplace, in XML format

    Returns the local .xml file name, or None on download failure.
    '''
    # Normalize the base URL so joining with the marketplace ID cannot
    # produce "...metadataID" (the sibling implementation already does this)
    if not metadata_url_base.endswith('/'):
        metadata_url_base += '/'
    # Get XML metadata file from StratusLab marketplace
    url_meta = metadata_url_base + mpid
    fn_meta = glancing.get_url(url_meta)
    if not fn_meta:
        vprint("Cannot retrieve XML metadata from URL: " + url_meta)
        return None
    # Give the temp file an .xml suffix so later type detection works
    os.rename(fn_meta, fn_meta + '.xml')
    return fn_meta + '.xml'
def get_meta_file(mpid, metadata_url_base):
    '''Retrieve image metadata from StratusLab marketplace, in XML format
    '''
    # Build the metadata URL, making sure base and ID are '/'-separated
    base = metadata_url_base
    if not base.endswith('/'):
        base += '/'
    url_meta = base + mpid
    fn_meta = glancing.get_url(url_meta)
    if not fn_meta:
        vprint("Cannot retrieve XML metadata from URL: " + url_meta)
        return None
    # Rename the temp file with an .xml suffix before handing it back
    xml_name = fn_meta + '.xml'
    os.rename(fn_meta, xml_name)
    return xml_name
def vprint_verbose(self, verbosity, test_name):
    '''Check utils.vprint output under the given verbosity setting.

    When verbosity is true, vprint('TOTOTITI', test_name) must write
    "<test_name>: TOTOTITI\\n" to stdout; otherwise nothing is written.
    '''
    # Save the global verbosity so it can be restored at the end
    v = utils.get_verbose()
    utils.set_verbose(verbosity)
    with utils.stringio() as output:
        expected = ''
        if verbosity:
            expected = test_name + ': TOTOTITI\n'
        with utils.redirect('stdout', output):
            utils.vprint('TOTOTITI', test_name)
        self.assertEqual(expected, output.getvalue())
        # getvalue() must be repeatable while the buffer is still open
        self.assertEqual(expected, output.getvalue())
    # Once the stringio context has exited, getvalue() must raise
    with self.assertRaises(ValueError):
        output.getvalue()
    utils.set_verbose(v)
def set_properties(mpid, new):
    '''Set image properties unconditionnally, accordingly to 'new' metadata

    Returns the status of the underlying glance update.
    '''
    vprint("Setting initial image properties for: " + mpid)
    props = []
    vprint("Setting name")
    props.extend(['--name', new['title']])
    vprint("Setting version")
    props.extend(['--property', 'version=' + new['version']])
    vprint("Setting mpid")
    props.extend(['--property', 'mpid=' + mpid])
    ret = glance.glance_update(new['title'], *props)
    if not ret:
        # vprint's second argument is a caller/test name prefix, not a
        # message part (see the vprint test helper): concatenate instead
        # of passing mpid as a separate argument
        vprint("Could not set properties for image: " + mpid)
    return ret
def multisum(digs, args):
    '''Emulate md5sum & its family, but computing all the message digests
    in parallel, only reading each file once.

    digs maps file names to {hash_algorithm: digest} dicts; one
    <ALGO>SUMS file is written per algorithm into args.directory.
    Raises ValueError when a target file exists and args.force is unset.
    '''
    data = collections.OrderedDict()
    # .items() instead of .iteritems(): equivalent here and Py3-compatible
    for (filename, digests) in digs.items():
        for (hash_alg, digest) in digests.items():
            fname = hash_alg.upper() + 'SUMS'
            lst_files = data.get(fname, [])
            lst_files.append(digest + ' ' + filename + '\n')
            data[fname] = lst_files
    for (filename, lines) in data.items():
        vprint('Writing file: ' + filename)
        # Join with the output directory BEFORE the existence check: the
        # previous code tested the bare name in the current directory,
        # not the actual destination path
        filename = os.path.join(args.directory, filename)
        if not args.force and os.path.exists(filename):
            raise ValueError('ERROR: file already exists:', filename)
        with open(filename, 'wb') as fout:
            fout.writelines(lines)
def ears_setup():
    '''Locate the Logitech microphone and return a configured Microphone.

    Raises IndexError when no device name contains "Logitech".
    '''
    p = PyAudio()
    count = p.get_device_count()
    device = [i for i in range(count)
              if "Logitech" in p.get_device_info_by_index(i)["name"]][0]
    source = Microphone(device_index=device)
    # yup, I'm playing with the internals of this class.
    source.CHUNK = 512
    source.RATE = 8000
    source.CHANNELS = 1
    try:
        source.__enter__()
        source.stream.stop_stream()
    except Exception:
        # Narrowed from a bare "except:" (which also caught SystemExit /
        # KeyboardInterrupt)
        vprint(1, "Microphone initialization failed.")
        # The context-manager protocol requires the (type, value, tb)
        # triple; calling __exit__() with no arguments raises TypeError
        source.__exit__(None, None, None)
    return source
def get_metadata(self): ret = self.data # StratusLab marketplace can answer with 200/OK but error encoded # in json response... if "rMsg" in self.json_obj: vprint('Bad JSON metadata') return None for val in self.json_obj.values(): for key in val: retkey = self._MDKEY_TO_RETKEY.get(key) value = val[key][0]['value'] if retkey: if retkey != 'algorithm': ret[retkey] = value else: ret['checksums'][sl_to_hashlib(value)] = ( val['http://mp.stratuslab.eu/slreq#value'][0] ['value']) return ret
def do_argparse(sys_argv):
    '''Build the CLI parser for the glance image manager and return the
    parsed arguments.'''
    cli = argparse.ArgumentParser(description='Manage glance VM images')
    # Global options
    cli.add_argument('-v', '--verbose', action='store_true',
                     help='display additional information')
    excl = cli.add_mutually_exclusive_group()
    excl.add_argument('-d', '--delete', dest='delete', metavar='NAME',
                      nargs='+', help='delete all images with the same '
                      'name as the specified VM')
    parsed = cli.parse_args(sys_argv)
    if parsed.verbose:
        utils.set_verbose(True)
        vprint('verbose mode')
    return parsed
def get_metadata(self): ret = self.data # StratusLab marketplace can answer with 200/OK but error encoded # in json response... if "rMsg" in self.json_obj: vprint('Bad JSON metadata') return None for val in self.json_obj.values(): for key in val: retkey = self._MDKEY_TO_RETKEY.get(key) value = val[key][0]['value'] if retkey: if retkey != 'algorithm': ret[retkey] = value else: ret['checksums'][sl_to_hashlib(value)] = ( val['http://mp.stratuslab.eu/slreq#value'] [0]['value']) return ret
def run(self):
    '''Proxy thread body: relay traffic between client and ETP server.

    Connects to the ETP server, then shuttles data in both directions,
    passing every payload through get_manipulated_data() before it is
    forwarded to the opposite socket.
    '''
    self.server_conn.settimeout(TIMEOUT_REQ_CONNECTION)
    try:
        self.server_conn.connect((EVERYTHING_SERVER_IP, ETP_PORT))
    except SOCKET_TIMEOUT:
        print("[!] [SOCKET_TIMEOUT] No Connection with ETP-Server at " +
              str((EVERYTHING_SERVER_IP, ETP_PORT)), file=sys.stderr)
        self.close_connection()
        return
    except Exception as e:
        print("[!] No Connection with ETP-Server at %s. Error: %s" % (str(
            (EVERYTHING_SERVER_IP, ETP_PORT)), str(e)), file=sys.stderr)
        self.close_connection()
        return
    # One outgoing message queue per socket
    message_queues = {self.server_conn: Queue(), self.client_conn: Queue()}
    while self.conns:
        readable, writable, exceptional = select(
            self.conns, self.conns, [], TIMEOUT_CLIENT_CONNECTION)
        if not (readable or writable or exceptional):
            # select() timed out with no activity: tear the relay down
            self.stop()
        for sck in readable:
            # Data read from one side is queued for the other side
            dst_sck = self.client_conn \
                if sck is self.server_conn \
                else self.server_conn
            data = recvall(sck)
            if not data:
                # Empty read: peer closed the connection
                self.stop()
                break
            # Manipulating the Data:
            new_data = self.get_manipulated_data(dst_sck, data)
            message_queues[dst_sck].put(new_data)
        for sck in writable:
            if not message_queues[sck].empty():
                next_msg = message_queues[sck].get_nowait()
                prefix = "[server2client]"
                if sck is self.server_conn:
                    prefix = "[client2server]"
                vprint(prefix, next_msg)
                sck.send(next_msg)
def apply(session, status, count, interval, reverse, check):
    """Extract code cell features"""
    # Only rows whose free-form ast_others field still has content
    filters = [
        NotebookAST.ast_others != "",
    ]
    if interval:
        filters += [
            NotebookAST.repository_id >= interval[0],
            NotebookAST.repository_id <= interval[1],
        ]
    query = (session.query(NotebookAST).filter(*filters))
    if count:
        # Count-only mode: report how many rows match and stop
        print(query.count())
        return
    if reverse:
        query = query.order_by(
            NotebookAST.repository_id.desc(),
            NotebookAST.id.desc(),
        )
    else:
        query = query.order_by(
            NotebookAST.repository_id.asc(),
            NotebookAST.id.asc(),
        )
    # Track the current repository so we can commit once per repository
    repository_id = None
    for ast in query:
        if check_exit(check):
            session.commit()
            vprint(0, 'Found .exit file. Exiting')
            return
        status.report()
        if ast.repository_id != repository_id:
            repository_id = ast.repository_id
            vprint(0, "Processing repository: {}".format(repository_id))
            session.commit()
        vprint(1, 'Processing ast: {}'.format(ast))
        # Move the 'ast_extslice' / 'ast_repr' markers out of ast_others
        # into their own counter columns, stripping them from the text
        ast.ast_extslice = ast.ast_others.count("ast_extslice")
        ast.ast_others = ast.ast_others.replace("ast_extslice", "").replace(",", "").strip()
        ast.ast_repr = ast.ast_others.count("ast_repr")
        ast.ast_others = ast.ast_others.replace("ast_repr", "").replace(",", "").strip()
        session.add(ast)
        vprint(2, "done")
        status.count += 1
    session.commit()
def apply(session, status, skip_if_error, dry_run, list_repo, count,
          interval, reverse, check):
    """Clone removed files"""
    filters = [
        Repository.processed.op("&")(consts.R_UNAVAILABLE_FILES) != 0,  # files unavailable
        # Skip repositories already marked with the caller-supplied error flag
        Repository.processed.op("&")(skip_if_error) == 0,
    ]
    if interval:
        filters += [
            Repository.id >= interval[0],
            Repository.id <= interval[1],
        ]
    query = session.query(Repository).filter(*filters)
    if count:
        # Count-only mode: report matching rows and stop
        print(query.count())
        return
    if reverse:
        query = query.order_by(Repository.id.desc())
    else:
        query = query.order_by(Repository.id.asc())
    for repository in query:
        if check_exit(check):
            vprint(0, "Found .exit file. Exiting")
            return
        status.report()
        status.count += 1
        if list_repo:
            # List mode: print the repository and the unpack command only
            vprint(0, "Repository {}".format(repository))
            vprint(1, "tar -xjf {} -C .".format(repository.zip_path))
            continue
        vprint(0, "Cloning repository {}".format(repository))
        with mount_basedir():
            result = clone_repository(session, repository, skip_if_error, dry_run)
        vprint(1, result)
        session.commit()
def get_glance_images():
    '''Get info about images already in glance

    Cache those informations to speedup subsequent calls
    '''
    global _GLANCE_IMAGES
    if _GLANCE_IMAGES is None:
        _GLANCE_IMAGES = {}
        # Optionally restrict the listing to the current tenant
        add_args = []
        tenant_msg = 'Using %s environment variable to filter image list'
        if 'OS_TENANT_ID' in os.environ:
            vprint(tenant_msg % 'OS_TENANT_ID')
            add_args = ['--owner', os.environ['OS_TENANT_ID']]
        elif 'OS_TENANT_NAME' in os.environ:
            vprint(tenant_msg % 'OS_TENANT_NAME')
            # Resolve the tenant name to its ID through keystone
            cmd = ['keystone', 'tenant-get', os.environ['OS_TENANT_NAME']]
            status, _, out, _ = utils.run(cmd, out=True)
            if status:
                _, block, _, _ = openstack_out.parse_block(out)
                for prop, val in block:
                    if prop == 'id':
                        add_args = ['--owner', val]
                        break
        for imgid in glance.glance_ids(None, *add_args):
            img = glance.glance_show(imgid)
            if img:
                vmmap = openstack_out.map_block(img)
                if 'mpid' in vmmap:
                    vprint(
                        ("Found 'mpid' property (%(mpid)s) already set on " +
                         "image: %(id)s (%(name)s)") % vmmap)
                    # Index the same record under marketplace ID, checksum
                    # and name so any of the three can be used for lookup
                    _GLANCE_IMAGES[vmmap['mpid']] = vmmap
                _GLANCE_IMAGES[vmmap['checksum']] = vmmap
                _GLANCE_IMAGES[vmmap['name']] = vmmap
    return _GLANCE_IMAGES
def glance_run(glance_cmd=None, glance_args=None, subcmd_args=None, **kwargs):
    '''Run the glance command-line client.

    glance_args go before the sub-command, subcmd_args after it.
    Recognized kwargs: quiet (suppress failure reporting), err_msg
    (custom failure message), stderr (return stderr instead of stdout).
    Returns the command output, or None on failure.
    '''
    cmd = list(_GLANCE_CMD)
    if glance_args is not None:
        cmd.extend(glance_args)
    # Handle site-specific parameters (for example: "--insecure")
    os_params = os.environ.get('OS_PARAMS', None)
    if os_params:
        # Insert right after the glance executable, before everything else
        cmd[1:1] = os_params.split()
    if glance_cmd is not None:
        cmd += [glance_cmd]
    if subcmd_args is not None:
        cmd.extend(subcmd_args)
    status, _, out, err = utils.run(cmd, out=True, err=True)
    if not status:
        if (not kwargs.get('quiet')) is True:
            err_msg = kwargs.get('err_msg', 'failed to run "%s"' % glance_cmd)
            vprint(err_msg)
            if glance_args is not None:
                vprint('glance_args: %s' % str(glance_args))
            if subcmd_args is not None:
                vprint('subcmd_args: %s' % str(subcmd_args))
            # out / err can be None: guard the string concatenation
            vprint_lines('stdout=' + (out or ''))
            vprint_lines('stderr=' + (err or ''))
        return None
    # This is for glance --version which outputs only to stderr...
    if kwargs.get('stderr'):
        return err
    return out
def glance_run(glance_cmd=None, glance_args=None, subcmd_args=None, **kwargs):
    '''Run the glance command-line client and return its stdout.

    glance_args go before the sub-command, subcmd_args after it.
    Recognized kwargs: quiet (suppress failure reporting when exactly
    True) and err_msg (custom failure message).  Returns None on failure.
    '''
    cmd = list(_GLANCE_CMD)
    if glance_args is not None:
        cmd.extend(glance_args)
    # Handle site-specific parameters (for example: "--insecure")
    os_params = os.environ.get('OS_PARAMS', None)
    if os_params:
        # Insert right after the glance executable, before everything else
        cmd[1:1] = os_params.split()
    if glance_cmd is not None:
        cmd += [glance_cmd]
    if subcmd_args is not None:
        cmd.extend(subcmd_args)
    status, _, out, err = utils.run(cmd, out=True, err=True)
    if not status:
        # Was "not kwargs.get('quiet') is True", which parses as
        # "not (x is True)": spelled explicitly with the same semantics
        if kwargs.get('quiet') is not True:
            err_msg = kwargs.get('err_msg', 'failed to run "%s"' % glance_cmd)
            vprint(err_msg)
            if glance_args is not None:
                vprint('glance_args: %s' % str(glance_args))
            if subcmd_args is not None:
                vprint('subcmd_args: %s' % str(subcmd_args))
            # out / err can be None, which crashed the concatenation here
            # (the sibling glance_run variant already guards with `or ''`)
            vprint_lines('stdout=' + (out or ''))
            vprint_lines('stderr=' + (err or ''))
        return None
    return out
def _refreshSSHLink(self, minSeconds=120, maxSeconds=600):
    '''Ensure the SSH control socket stays usable for at least minSeconds.

    Reuses the current socket when it will still live long enough;
    otherwise rotates to the next socket path and spawns a background
    keep-alive process.  Returns True on success, False when the new
    socket did not appear within 10 seconds.
    '''
    # if there is a link, ensure it'll still live for minimum lifetime
    if os.path.exists(self.sshLink) and stat.S_ISSOCK(os.stat(self.sshLink).st_mode):
        if ( time.time() - self.socketTimestamp < maxSeconds-minSeconds ):
            return True
    # rotate socket: cycle through enough IDs so an old socket has expired
    # before its path is reused
    self.socketIdNow = (self.socketIdNow + 1) % (math.ceil(1.0*maxSeconds/(maxSeconds-minSeconds)) + 1)
    self.sshLink = self.sshLinkBase+str(self.socketIdNow)
    self.socketArgsDef = " -o ControlMaster=auto -o ControlPath=" + self.sshLink + " "
    if os.path.exists(self.sshLink):
        os.remove(self.sshLink)
    # send a dummy background process over ssh to keep the connection going
    socketProc = self._SocketProcess("sleep %s" % maxSeconds)
    # poll until ssh has created the control socket (max 10s)
    timeout = 0
    while not os.path.exists(self.sshLink):
        time.sleep(0.5)
        timeout += 0.5
        if timeout == 6:
            vprint("SSH socket still not available after 6 seconds...\n%s" % self.sshLink, level=1)
            vprint('Socket process: %s' % (socketProc.cmd), level=2)
        if timeout == 10:
            return False
    self.socketTimestamp = time.time()
    return self._secureSSHLink()
def recognize(source):
    '''Record audio from *source* and print what was recognized.'''
    rec = Recognizer()
    stream = source.stream
    stream.start_stream()
    audio = rec.listen(source)
    stream.stop_stream()
    vprint(4, "Finished recording.")
    try:
        vprint(0, "You said " + rec.recognize(audio))
    except LookupError:
        # The recognizer could not map the audio to any text
        vprint(0, "Could not understand audio")
def apply(session, status, count, interval, reverse, check):
    """Remove non zip files from compressed repositories"""
    filters = [
        Repository.processed.op("&")(consts.R_COMPRESS_OK) != 0,  # Were compressed
    ]
    if interval:
        filters += [
            Repository.id >= interval[0],
            Repository.id <= interval[1],
        ]
    query = session.query(Repository).filter(*filters)
    if count:
        # Count-only mode: report matching rows and stop
        print(query.count())
        return
    if reverse:
        query = query.order_by(
            Repository.id.desc()
        )
    else:
        query = query.order_by(
            Repository.id.asc()
        )
    for repository in query:
        if check_exit(check):
            vprint(0, "Found .exit file. Exiting")
            return
        status.report()
        with mount_basedir():
            if repository.zip_path.exists() and repository.path.exists():
                # Both forms present: keep the zip, drop the expanded tree
                vprint(0, "Removing non zip files from {}".format(repository))
                shutil.rmtree(str(repository.path), ignore_errors=True)
                session.add(repository)
            elif repository.path.exists():
                # Only the tree exists: the compression flag was wrong
                vprint(0, "Zip not found for {}".format(repository))
                repository.processed -= consts.R_COMPRESS_OK
                session.add(repository)
            elif not repository.zip_path.exists():
                # Neither form exists: mark the files as unavailable
                vprint(0, "Repository not found {}".format(repository))
                repository.processed |= consts.R_UNAVAILABLE_FILES
                session.add(repository)
        status.count += 1
        session.commit()
def process_repository(session, status, repository, query_iter):
    '''Load each notebook of *repository* and store the (transformed)
    source of every cell marked for extraction.

    query_iter yields (cell, notebook, repository) tuples for a single
    repository.  Returns a short status string.
    '''
    query_iter = list(query_iter)
    zip_path = None
    tarzip = None
    if not repository.path.exists():
        # Fall back to the compressed form; if that is missing too,
        # flag the repository and account for all its cells
        if not repository.zip_path.exists():
            repository.processed |= consts.R_UNAVAILABLE_FILES
            session.add(repository)
            status.count += len(query_iter)
            return "Failed. Repository not found: {}".format(repository)
        tarzip = tarfile.open(str(repository.zip_path))
        zip_path = Path(repository.hash_dir2)
    shell = InteractiveShell.instance()
    # Group the cells by their notebook
    group = groupby(
        query_iter,
        lambda x: (x[1])
    )
    for notebook, new_iter in group:
        # NOTE(review): this counts ALL cells of the repository, not just
        # this notebook's — looks like it was meant to be list(new_iter);
        # only the log message is affected. Confirm before changing.
        cells = list(query_iter)
        vprint(1, "Processing notebook: {}. Found {} cells".format(notebook, len(cells)))
        name = notebook.name
        vprint(2, "Loading notebook file")
        if tarzip:
            notebook = nbf.read(
                tarzip.extractfile(tarzip.getmember(str(zip_path / name))),
                nbf.NO_CONVERT
            )
        else:
            with open(str(repository.path / name)) as ofile:
                notebook = nbf.read(ofile, nbf.NO_CONVERT)
        # Normalize to nbformat v4 before indexing cells
        notebook = nbf.convert(notebook, 4)
        metadata = notebook["metadata"]
        language_info = metadata.get("language_info", {})
        language_name = language_info.get("name", "unknown")
        for cell, _, _ in new_iter:
            vprint(2, "Loading cell {}".format(cell.index))
            index = int(cell.index)
            notebook_cell = notebook["cells"][index]
            source = notebook_cell.get("source", "")
            if language_name == "python" and notebook_cell.get("cell_type") == "code":
                try:
                    # Strip IPython magics etc. into plain Python source
                    source = shell.input_transformer_manager.transform_cell(source)
                except (IndentationError, SyntaxError):
                    pass
            cell.source = source
            if cell.processed & consts.C_MARKED_FOR_EXTRACTION:
                cell.processed -= consts.C_MARKED_FOR_EXTRACTION
            session.add(cell)
        session.commit()
    return "ok"
def apply(session, status, use_compressed, count, interval, reverse, check):
    '''Process cells marked for extraction, repository by repository.'''
    filters = [
        Cell.processed.op('&')(consts.C_MARKED_FOR_EXTRACTION) != 0,
        Repository.processed.op("&")(use_compressed) == 0,
    ]
    if interval:
        filters += [
            Repository.id >= interval[0],
            Repository.id <= interval[1],
        ]
    query = (
        session.query(Cell, Notebook, Repository)
        .join(Notebook)
        .join(Repository)
        .filter(*filters)
        .order_by(
            Repository.id.asc(),
            Notebook.id.asc()
        )
    )
    if count:
        # Count-only mode: report matching rows and stop
        print(query.count())
        return
    # NOTE(review): this appends a second ordering to the one above;
    # the earlier asc() criteria still come first in the SQL ORDER BY
    if reverse:
        query = query.order_by(
            Repository.id.desc(),
            Notebook.id.asc(),
            Cell.id.asc(),
        )
    else:
        query = query.order_by(
            Repository.id.asc(),
            Notebook.id.asc(),
            Cell.id.asc(),
        )
    # Group the (cell, notebook, repository) rows by repository
    group = groupby(
        query,
        lambda x: (
            x[2]
        )
    )
    for repository, query_iter in group:
        if check_exit(check):
            vprint(0, "Found .exit file. Exiting")
            return
        status.report()
        vprint(0, "Processing repository: {}".format(repository))
        with mount_basedir():
            result = process_repository(session, status, repository, query_iter)
            vprint(1, result)
        session.commit()
def main(sys_argv=sys.argv[1:]):
    '''Entry point: delete the named images, or list all image IDs.'''
    args = do_argparse(sys_argv)
    if args.delete:
        vprint('Trying to delete: "%s"' % str(args.delete))
        return glance_delete_all(args.delete)
    vprint('Listing image IDs:')
    all_images_ids = glance_ids()
    if not all_images_ids:
        vprint('Error: cannot list image IDs')
        return False
    for img_id in all_images_ids:
        print(img_id)
    return True
def doTransfer(self, listDescSourceTarget):
    '''Copy each (description, source, target) entry to every configured
    storage-element path.

    Raises ConfigError when no SE path is configured, and RuntimeError
    when a copy failed and the user confirms the file is missing.
    '''
    for (desc, source, target) in listDescSourceTarget:
        if not self.smPaths:
            # Fixed unbalanced quote in the error message ("'%s path" -> "'%s' path")
            raise ConfigError("%s can't be transferred because '%s' path wasn't set" % (desc, self.smOptPrefix))
        # set() deduplicates identical SE paths before copying
        for idx, sePath in enumerate(set(self.smPaths)):
            utils.vprint('Copy %s to SE %d ' % (desc, idx + 1), -1, newline = False)
            sys.stdout.flush()
            proc = se_copy(source, os.path.join(sePath, target), self.smForce)
            if proc.wait() == 0:
                utils.vprint('finished', -1)
            else:
                utils.vprint('failed', -1)
                utils.eprint(proc.getMessage())
                utils.eprint('Unable to copy %s! You can try to copy it manually.' % desc)
                if not utils.getUserBool('Is %s (%s) available on SE %s?' % (desc, source, sePath), False):
                    raise RuntimeError('%s is missing on SE %s!' % (desc, sePath))
def get_url(url):
    '''Retrieve content from URL into a temporary file.

    Return temporary file name.
    '''
    # Python 2 code: relies on the `unicode` builtin and urllib2-style
    # urlopen/HTTPError/URLError
    if not url or not isinstance(url, (str, unicode)):
        return None
    try:
        url_f = urlopen(url)
    except HTTPError as exc:
        # 404 is an expected "no such resource" answer; anything else
        # propagates to the caller
        if exc.code == 404 and exc.reason == 'Not Found':
            vprint(str(exc))
            return None
        raise exc
    except URLError as exc:
        vprint(str(exc))
        return None
    except ValueError as exc:
        # urlopen() raises ValueError for scheme-less strings: treat the
        # argument as a local file path in that case
        if exc.args[0] == 'unknown url type: %s' % url:
            # TODO: is this secure enough ? It should only be used for testing
            # metadata mode
            if not os.path.exists(url):
                return None
            else:
                url_f = open(url, 'r')
                if not url_f:
                    # NOTE(review): open() raises instead of returning a
                    # falsy object — this branch looks unreachable; confirm
                    vprint(str(exc))
                    return None
        else:
            raise exc
    # Stream the source into an unlinked-later temp file in 4 KiB blocks
    with tempfile.NamedTemporaryFile(bufsize=4096, delete=False) as fout:
        try:
            utils.block_read_filedesc(url_f, fout.write, 4096)
        except IOError as exc:
            vprint('cannot write temp file: ' + fout.name)
            os.remove(fout.name)
            return None
    return fout.name
def add_checksum(dig, metadata, overrides=False):
    '''Record digest *dig* into metadata['checksums'].

    The algorithm is deduced from the digest length.  Returns False on an
    unrecognized digest length or on a conflicting value (unless
    *overrides* is set), True otherwise.
    '''
    try:
        halg = multihash.len2hash(len(dig))
    except KeyError:
        vprint('unrecognized digest: ' + dig)
        return False
    known = metadata['checksums']
    if halg not in known:
        # First digest seen for this algorithm: just record it
        known[halg] = dig
        return True
    if dig == known[halg]:
        vprint('duplicate digest, computing only once: ' + dig)
        return True
    if overrides:
        known[halg] = dig
        return True
    vprint('conflicting digests: ' + dig + ':' + known[halg])
    return False
def main(sys_argv=sys.argv[1:]):
    '''Download images specified in the given list & store them in glance
    '''
    args = do_argparse(sys_argv)
    vprint('Image list(s): ' + str(args.vmlist))
    if not args.vmlist:
        vprint('No image list specified')
        return False
    # All list files must be readable before any work starts
    for img_list_file in args.vmlist:
        if not os.path.exists(img_list_file):
            vprint('Cannot access image list file: ' + img_list_file)
            return False
    for vmid in get_vmlist(args.vmlist):
        cleaned = vmid.strip()
        if cleaned:
            handle_vm(cleaned, args.url)
    return True
def check_digests(local_image_file, metadata, replace_bads=False):
    '''Verify the image file against every digest in metadata['checksums'].

    Reads the file once, computing all digests in parallel.  Returns the
    number of matching digests; when *replace_bads* is set, mismatching
    entries are overwritten with the computed value.
    '''
    verified = 0
    hashes = metadata['checksums']
    mhash = multihash.multihash_hashlib(hashes)
    mhash.hash_file(local_image_file)
    hds = mhash.hexdigests()
    for hashfn in sorted(hashes):
        digest_computed = hds[hashfn]
        digest_expected = hashes[hashfn]
        # Case-insensitive comparison: hex digests may differ in case
        if digest_computed.lower() == digest_expected.lower():
            verified += 1
            vprint('%s: %s: OK' % (local_image_file, hashfn))
            continue
        vprint('%s: %s: expected: %s' % (local_image_file, hashfn, digest_expected))
        vprint('%s: %s: computed: %s' % (local_image_file, hashfn, digest_computed))
        if replace_bads:
            hashes[hashfn] = digest_computed
    return verified
def do_argparse(sys_argv):
    '''Build and run the CLI parser for the glance importer; return the
    parsed arguments, enabling verbose mode as a side effect.'''
    desc_help = textwrap.dedent('''
        Import VM images into OpenStack glance image registry.
        Verify checksum(s), image size, etc...
        Backup old images being replaced.
    ''')
    parser = argparse.ArgumentParser(description=desc_help,
                                     formatter_class=utils.AlmostRawFormatter)
    # --force and --dry-run are mutually exclusive: one forces the import
    # despite checksum failures, the other skips the import entirely
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-f', '--force', action='store_true',
                       help=('Import image into glance even if checksum '
                             'verification failed'))
    group.add_argument('-d', '--dry-run', dest='dryrun', action='store_true',
                       help='Do not import image into glance')
    parser.add_argument('-D', '--no-checksum', action='store_true',
                        help='Do not verify checksums', dest='nocheck')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Display additional information')
    parser.add_argument('-n', '--name', dest='name', default=None,
                        help=('Name of the image in glance registry. Default '
                              'value derived from image file name.'))
    parser.add_argument('-k', '--keep-temps', dest='keeptemps',
                        action='store_true',
                        help='Keep temporary files (VM image & other)')
    parser.add_argument('-c', '--cern-list', dest='cernlist',
                        help='Image list from CERN, as a JSON file')
    parser.add_argument('-b', '--backup-dir', dest='backupdir',
                        help='Backup already existing images in this directory')
    digests_help = ('''>>> A colon-separated list of message digests of the image.

    This overrides / complements the checksums that are present in
    metadata, if using the StratusLab marketplace. It also overrides
    checksum files loaded with -S or --sums-files.

    Algorithms are deduced from checksum lengths.

    For example, an MD5 (32 chars) and a SHA-1 (40 chars):
    "3bea57666cdead13f0ed4a91beef2b98:1b5229d5dad92bc6662553be01608af2180eafbe"
    ''')
    parser.add_argument('-s', '--sums', dest='digests', help=digests_help)
    digests_files_help = ('''>>> A message digest file to load.

    This overrides / complements the checksums that are present in
    metadata, if using the StratusLab marketplace.

    Algorithms are deduced from checksum lengths.
    ''')
    parser.add_argument('-S', '--sums-files', dest='sums_files', nargs='*',
                        help=digests_files_help)
    descriptor_help = ('''>>> This can be:

    * a local file path:
      - to an image file in "raw" format
      - to a StratusLab marketplace metadata file in JSON or XML format
    * a StratusLab marketplace ID
    * a network location (URL)
    * a CERN image list ID, with the VM image list passed in as --cern-list

    The StratusLab marketplace is located here:
        https://marketplace.stratuslab.eu
    ''')
    parser.add_argument('descriptor', metavar='STRING', help=descriptor_help)
    args = parser.parse_args(sys_argv)
    if args.verbose:
        utils.set_verbose(True)
        vprint('verbose mode')
    return args
def main(sys_argv=sys.argv[1:]):
    '''Import one VM image into the glance registry.

    Pipeline: parse CLI args, guess the descriptor type, fetch metadata,
    download the image if remote, decompress if needed, verify size and
    checksums, backup/delete any pre-existing glance image with the same
    name, then import.

    Returns True on success, False on any failure (most failures are
    reported through vprint rather than exceptions).
    '''
    # Handle CLI arguments
    args = do_argparse(sys_argv)

    # Check glance availability early
    if not args.dryrun and not glance.glance_ok():
        vprint('local glance command-line client problem')
        return False

    # Guess which mode are we operating in
    # image_type ends up as one of: 'url', 'xml', 'json', 'image',
    # 'cern', 'market', or None when nothing matched.
    image_type = None
    desc = args.descriptor
    vprint('descriptor: ' + desc)
    if desc.startswith('http://') or desc.startswith('https://'):
        image_type = 'url'
    elif os.path.exists(desc):
        # Local path: dispatch on file extension, default to raw image.
        ext = os.path.splitext(desc)[1]
        if ext == '.xml':
            image_type = 'xml'
        elif ext == '.json':
            image_type = 'json'
        else:
            image_type = 'image'
    else:
        if args.cernlist:
            image_type = 'cern'
        elif len(desc) == 27:
            # StratusLab marketplace IDs are 27 characters long.
            try:
                # This was assumed to be SLMP ID encoding format, apparently not
                #base64.decodestring(desc)
                # NOTE(review): with the decodestring call commented out,
                # the except branch below is unreachable — binascii.Error
                # can no longer be raised here.
                image_type = 'market'
            except binascii.Error:
                vprint('probably invalid StratusLab marketplace ID:', desc)
        else:
            vprint('unknown descriptor')

    if image_type is None:
        vprint('Cannot guess mode of operation')
        return False
    vprint('Image type: ' + image_type)

    # Prepare VM image metadata
    if image_type == 'market':
        # Get xml metadata file from StratusLab marketplace
        # NOTE(review): 'StratuLab' typo in the messages below is a runtime
        # string, left as-is here.
        metadata_url_base = 'https://marketplace.stratuslab.eu/marketplace/metadata/'
        sl_md_url = metadata_url_base + args.descriptor
        local_metadata_file = get_url(sl_md_url)
        if local_metadata_file is None:
            vprint('cannot get xml metadata file from StratuLab marketplace: ' + sl_md_url)
            return False
        else:
            vprint('downloaded xml metadata file from StratuLab marketplace: ' + sl_md_url)
            vprint('into local file: ' + local_metadata_file)
        meta = md.MetaStratusLabXml(local_metadata_file)
    elif image_type == 'cern':
        meta = md.MetaCern(args.cernlist, args.descriptor)
    elif image_type == 'json':
        meta = md.MetaStratusLabJson(args.descriptor)
    elif image_type == 'xml':
        meta = md.MetaStratusLabXml(args.descriptor)

    # Raw local files / plain URLs carry no metadata: start from an
    # empty checksum set and assume "raw" image format.
    if image_type in ('image', 'url'):
        metadata = {'checksums': {}, 'format': 'raw'}
    else:
        metadata = meta.get_metadata()

    # Ensure we have something to work on
    if not metadata:
        vprint('Cannot retrieve metadata')
        return False

    # Retrieve image in a local file
    if image_type == 'image':
        # Already a local file
        local_image_file = args.descriptor
    else:
        # Download from network location
        if image_type in ('xml', 'json', 'market', 'cern'):
            url = metadata['location']
        elif image_type == 'url':
            url = args.descriptor
        local_image_file = get_url(url)
        if not local_image_file or not os.path.exists(local_image_file):
            vprint('cannot download from: ' + url)
            return False
        vprint(local_image_file + ': downloaded image from: ' + url)

    # VM images are compressed, but checksums are for uncompressed files
    compressed = ('compression' in metadata and metadata['compression'] and
                  metadata['compression'].lower() != 'none')
    if compressed:
        chext = '.' + metadata['compression']
        decomp = decompressor.Decompressor(local_image_file, ext=chext)
        # doit() returns (success, path-of-uncompressed-file)
        res, local_image_file = decomp.doit(delete=(not args.keeptemps))
        if not res:
            vprint(local_image_file + ': cannot uncompress')
            return False
        vprint(local_image_file + ': uncompressed file')

    # base_name is only needed for the 'image'/'url' modes, where the
    # glance name and checksum-file matching derive from the file name.
    if image_type == 'image':
        base_name = os.path.basename(local_image_file)
    elif image_type == 'url':
        base_name = os.path.basename(urlsplit(url)[2])

    # Choose VM image name
    name = args.name
    if name is None:
        if image_type in ('image', 'url'):
            name, ext = os.path.splitext(base_name)
        elif image_type in ('xml', 'json', 'market', 'cern'):
            name = meta.get_name()
    vprint(local_image_file + ': VM image name: ' + name)

    # Populate metadata message digests to be verified, from checksum files
    if args.sums_files:
        # Checksum files are only supported for plain image/url modes.
        if image_type in ('xml', 'json', 'market', 'cern'):
            raise NotImplementedError
        else:
            base_fn = base_name
        # Standard md5sum/sha1sum line format: "<hexdigest>  <filename>"
        re_chks_line = re.compile(r'(?P<digest>[a-zA-Z0-9]+)\s+(?P<filename>.+)')
        for sum_file in args.sums_files:
            # Checksum files may themselves be remote: fetch them first.
            if sum_file.startswith(('http://', 'https://')):
                local_sum_file = get_url(sum_file)
                if not local_sum_file or not os.path.exists(local_sum_file):
                    vprint('cannot download from: ' + sum_file)
                    return False
                vprint(local_sum_file + ': downloaded checksum file from: ' + sum_file)
                sum_file = local_sum_file
            with open(sum_file, 'rb') as sum_f:
                vprint(sum_file + ': loading checksums...')
                for line in sum_f:
                    match = re_chks_line.match(line)
                    # Only take digests whose filename matches our image.
                    if match and base_fn == match.group('filename'):
                        vprint(sum_file + ': matched filenames: ' + base_fn +
                               ' == ' + match.group('filename'))
                        ret = add_checksum(match.group('digest'), metadata,
                                           overrides=True)
                        if not ret:
                            vprint(sum_file + ': cannot add_checksum(' +
                                   match.group('digest') + ')')
                            return False

    # Populate metadata message digests to be verified, from CLI parameters
    if args.digests:
        # Split on ':' and drop empty fields (e.g. leading/trailing colons).
        digs = [dig for dig in args.digests.split(':') if dig]
        for dig in digs:
            ret = add_checksum(dig, metadata, overrides=True)
            if not ret:
                return False

    # Verify image size
    size_ok = True
    if 'bytes' in metadata:
        size_expected = int(metadata['bytes'])
        size_actual = os.path.getsize(local_image_file)
        size_ok = size_expected == size_actual
        if size_ok:
            vprint('%s: size: OK: %s' % (local_image_file, size_t(size_actual)))
        else:
            vprint('%s: size: expected: %d' % (local_image_file, size_expected))
            vprint('%s: size: actual: %d' % (local_image_file, size_actual))
            # A size mismatch is fatal unless --force was given.
            if not args.force:
                return False

    # Verify image checksums
    # 'verified' counts how many digests passed; with --no-checksum we
    # pretend everything passed by initializing it to the digest count.
    verified = len(metadata['checksums'])
    if not args.nocheck:
        verified = 0
        if size_ok:
            if len(metadata['checksums']) > 0:
                vprint(local_image_file + ': verifying checksums')
                verified = check_digests(local_image_file, metadata, args.force)
            elif image_type not in ('xml', 'json', 'market', 'cern'):
                vprint(local_image_file + ': no checksum to verify (forgot "-s" CLI option ?)')
            else:
                vprint(local_image_file + ': no checksum to verify found in metadata...')
        else:
            if args.force:
                # Size differs but --force: replace all digests with a dummy
                # md5 so check_digests recomputes one for the import below.
                vprint(local_image_file + ': size differ, but forcing the use of recomputed md5')
                metadata['checksums'] = {'md5': '0' * 32}
                check_digests(local_image_file, metadata, args.force)
            else:
                vprint(local_image_file + ': size differ, not verifying checksums')

    # If image already exists, download it to backup directory prior to deleting
    if not args.dryrun and glance.glance_exists(name):
        if args.backupdir:
            backupdir = args.backupdir
        else:
            backupdir = os.environ.get('GLANCING_BACKUP_DIR', '/tmp/glancing')
        do_backup = True
        if not os.path.exists(backupdir):
            os.mkdir(backupdir)
        elif not os.path.isdir(backupdir):
            vprint(backupdir + ' exists but is not a directory, sorry '
                   'cannot backup old images...')
            do_backup = False
        if do_backup:
            fn_local = os.path.join(backupdir, name)
            status = glance.glance_download(name, fn_local)
            if not status:
                return False
            # Old image is only deleted once its backup download succeeded.
            glance.glance_delete(name, quiet=(not utils.get_verbose()))

    # Import image into glance
    if not args.dryrun:
        if (size_ok and len(metadata['checksums']) == verified) or args.force:
            vprint(local_image_file + ': importing into glance as "%s"' % str(name))
            md5 = metadata['checksums'].get('md5', None)
            ret = glance.glance_import(local_image_file, md5, name,
                                       metadata['format'])
            if not ret:
                return False
        else:
            return False
    else:
        # Dry-run still reports verification failure through the exit status.
        if not args.force and (not size_ok or
                               not len(metadata['checksums']) == verified):
            return False

    # TODO: the following should be done even if something went wrong...
    # Keep downloaded image file
    if not image_type == 'image' and not args.keeptemps:
        vprint(local_image_file + ': deleting temporary file')
        os.remove(local_image_file)

    # That's all folks !
    return True
def update_properties(mpid, old, new): '''Update image properties as needed, accordingly to 'old' & 'new' metadata ''' vprint("Updating image properties: " + mpid) props = [] if old['name'] != new['title']: vprint("Updating name") props.extend(['--name', new['title']]) if old['version'] != new['version']: vprint("Updating version") props.extend(['--property', 'version=' + new['version']]) if ('mpid' not in old) or (old['mpid'] != mpid): vprint("Updating mpid") props.extend(['--property', 'mpid=' + mpid]) if props: if not glance.glance_update(old['id'], *props): vprint("Could not set image properties for: ", mpid) return False else: vprint("NO-OP: All properties have the right values") return True
def handle_vm(mpid, url):
    '''Handle one image given by its SL marketplace ID

    Reconciles the marketplace image against the local glance registry:
    upgrade it when already known by mpid, otherwise try to match an
    existing glance image by MD5 or by name before uploading it as new.
    Returns None when nothing is uploaded; otherwise the upload/property
    status (see upload_image / set_properties).
    '''
    vprint('Handle image with marketplace ID : %s' % mpid)
    meta_file = get_meta_file(mpid, url)
    if meta_file is None:
        return

    # TODO: delete meta_file to avoid filling /tmp
    new = metadata.MetaStratusLabXml(meta_file).get_metadata()
    # vmmap is keyed by mpid, checksum AND name (see get_glance_images).
    vmmap = get_glance_images()

    if mpid in vmmap:
        vprint("Image is already in glance")
        needs_upgrade(mpid, vmmap[mpid], new, meta_file)
        # TODO: check other image properties, they should match perfectly
    else:
        vprint("No image with the same marketplace ID found in glance")
        new_md5 = new['checksums']['md5']
        new_name = new['title']
        new_ver = new['version']

        if new_md5 in vmmap:
            # Same content already in glance, but under no/another mpid:
            # report any name/version discrepancy and leave it alone.
            vprint('An image with the same MD5 is already in glance')
            old = vmmap[new_md5]
            old_md5 = old['checksum']
            old_name = old['name']
            old_ver = old.get('version', None)
            diff = False
            # Check name
            if old_name != new_name:
                vprint("Names differ, old: %s, new: %s" % (old_name, new_name))
                diff = True
            # Check Version
            if old_ver != new_ver:
                vprint("Versions differ, old: %s, new: %s" % (old_ver, new_ver))
                diff = True
            # Which one is the good one ? Let the admin sort it out...
            if diff:
                diff_msg = "differ, but we don't know which is the good one"
            else:
                diff_msg = "look like the same images"
            vprint("They %s, ignoring..." % diff_msg)
            return
        elif new_name in vmmap:
            # Same name but (supposedly) different content: treat as an
            # update, renaming the previous image out of the way first.
            old = vmmap[new_name]
            old_md5 = old['checksum']
            old_name = old['name']
            old_ver = old['version']
            vprint('An image with the same name is already in glance: ' +
                   old_name)
            diff = False
            err_msg = "But %s differ, old: %s, new: %s"
            # Check MD5
            if old_md5 != new_md5:
                vprint(err_msg % ("checksums", old_md5, new_md5))
                diff = True
            # Check Version
            assert isinstance(old_ver, int)
            assert isinstance(new_ver, int)
            if old_ver != new_ver:
                vprint(err_msg % ("versions", old_ver, new_ver))
                diff = True
                # NOTE(review): this downgrade guard is nested under the
                # version-mismatch branch; since '>' implies '!=' the
                # behavior is the same either way.
                if old_ver > new_ver:
                    vprint("Versions are going backwards, that's not good.")
                    vprint("Ignoring, fix the image on the market place.")
                    return
            # This should not happen, as it should already have been caught by
            # earlier MD5 checking...
            if not diff:
                vprint("Identical images, that should not happen, please report"
                       " as a bug.")
                return
            if 'mpid' in old:
                vprint("Previous image has 'mpid' property set, "
                       "keeping it as-is...")
            # Backup old image by renaming
            if not glance.glance_rename(old_name, old_name + '_old'):
                vprint('Cannot rename old image, aborting update...')
                return
            vprint("Previous image renamed to: " + old_name + '_old')

        # Brand-new image (or the name-match path above fell through):
        # upload it, then stamp its glance properties.
        ret = upload_image(mpid, new_name, meta_file)
        if ret:
            ret = set_properties(mpid, new)
        return ret
def get_glance_images(): '''Get info about images already in glance Cache those informations to speedup subsequent calls ''' global _GLANCE_IMAGES if _GLANCE_IMAGES is None: _GLANCE_IMAGES = {} add_args = [] tenant_msg = 'Using %s environment variable to filter image list' if 'OS_TENANT_ID' in os.environ: vprint(tenant_msg % 'OS_TENANT_ID') add_args = ['--owner', os.environ['OS_TENANT_ID']] elif 'OS_TENANT_NAME' in os.environ: vprint(tenant_msg % 'OS_TENANT_NAME') cmd = ['keystone', 'tenant-get', os.environ['OS_TENANT_NAME']] status, _, out, err = utils.run(cmd, out=True, err=True) if status: _, block, _, _ = openstack_out.parse_block(out) for prop, val in block: if prop == 'id': add_args = ['--owner', val] break else: vprint('Failed to run : %s' % ' '.join(cmd)) vprint('OUT: ' + str(out)) vprint('ERR: ' + str(err)) for imgid in glance.glance_ids(None, *add_args): img = glance.glance_show(imgid) if img: vmmap = openstack_out.map_block(img) if 'mpid' in vmmap: vprint(("Found 'mpid' property (%(mpid)s) already set on " + "image: %(id)s (%(name)s)") % vmmap) _GLANCE_IMAGES[vmmap['mpid']] = vmmap _GLANCE_IMAGES[vmmap['checksum']] = vmmap _GLANCE_IMAGES[vmmap['name']] = vmmap return _GLANCE_IMAGES
def glance_exists(name): if not isinstance(name, (str, unicode)): vprint('glance_exists(name=%s): name is not a string, but a %s' % (name, str(type(name)))) raise TypeError return len(glance_ids([name])) > 0