def test_glance_delete_all(self):
    self.common_start()
    self.assertTrue(glance.glance_import(os.devnull, name=self._IMG_NAME,
                                         diskformat='raw'))
    self.assertTrue(glance.glance_import(os.devnull, name=self._IMG_NAME,
                                         diskformat='raw'))
    self.assertTrue(glance.glance_exists(self._IMG_NAME))
    self.assertTrue(glance.glance_delete_all(self._IMG_NAME))
    self.assertFalse(glance.glance_exists(self._IMG_NAME))
def test_glance_diskformat(self):
    glance.glance_delete_all(utils.test_name(), quiet=True)
    ret = glance.glance_import(os.devnull, name=utils.test_name(),
                               diskformat='notgood')
    self.assertFalse(ret)
    ret = glance.glance_import(os.devnull, name=utils.test_name())
    self.assertFalse(ret)
    glance.glance_delete_all(utils.test_name(), quiet=True)
def test_glance_ids_uuid(self):
    glance.glance_delete_all(utils.test_name(), quiet=True)
    ret = glance.glance_import(os.devnull, name=utils.test_name(),
                               diskformat='raw')
    self.assertTrue(ret)
    img_uuids_name = list(glance.glance_ids(utils.test_name()))
    self.assertEqual(len(img_uuids_name), 1)
    img_uuid = img_uuids_name[0]
    img_uuids_uuid = list(glance.glance_ids(img_uuid))
    self.assertEqual(img_uuids_uuid, img_uuids_name)
    glance.glance_delete_all(utils.test_name(), quiet=True)
def main(sys_argv=sys.argv[1:]):

    # Handle CLI arguments
    args = do_argparse(sys_argv)

    # Check glance availability early
    if not args.dryrun and not glance.glance_ok():
        vprint('local glance command-line client problem')
        return False

    # Guess which mode we are operating in
    image_type = None
    desc = args.descriptor
    vprint('descriptor: ' + desc)
    if desc.startswith('http://') or desc.startswith('https://'):
        image_type = 'url'
    elif os.path.exists(desc):
        ext = os.path.splitext(desc)[1]
        if ext == '.xml':
            image_type = 'xml'
        elif ext == '.json':
            image_type = 'json'
        else:
            image_type = 'image'
    else:
        if args.cernlist:
            image_type = 'cern'
        elif len(desc) == 27:
            try:
                # This was assumed to be SLMP ID encoding format, apparently not
                #base64.decodestring(desc)
                image_type = 'market'
            except binascii.Error:
                vprint('probably invalid StratusLab marketplace ID:', desc)
        else:
            vprint('unknown descriptor')

    if image_type is None:
        vprint('Cannot guess mode of operation')
        return False
    vprint('Image type: ' + image_type)

    # Prepare VM image metadata
    if image_type == 'market':
        # Get xml metadata file from StratusLab marketplace
        metadata_url_base = 'https://marketplace.stratuslab.eu/marketplace/metadata/'
        sl_md_url = metadata_url_base + args.descriptor
        local_metadata_file = get_url(sl_md_url)
        if local_metadata_file is None:
            vprint('cannot get xml metadata file from StratusLab marketplace: ' +
                   sl_md_url)
            return False
        else:
            vprint('downloaded xml metadata file from StratusLab marketplace: ' +
                   sl_md_url)
            vprint('into local file: ' + local_metadata_file)
        meta = md.MetaStratusLabXml(local_metadata_file)
    elif image_type == 'cern':
        meta = md.MetaCern(args.cernlist, args.descriptor)
    elif image_type == 'json':
        meta = md.MetaStratusLabJson(args.descriptor)
    elif image_type == 'xml':
        meta = md.MetaStratusLabXml(args.descriptor)

    if image_type in ('image', 'url'):
        metadata = {'checksums': {}, 'format': 'raw'}
    else:
        metadata = meta.get_metadata()

    # Ensure we have something to work on
    if not metadata:
        vprint('Cannot retrieve metadata')
        return False

    # Retrieve image in a local file
    if image_type == 'image':
        # Already a local file
        local_image_file = args.descriptor
    else:
        # Download from network location
        if image_type in ('xml', 'json', 'market', 'cern'):
            url = metadata['location']
        elif image_type == 'url':
            url = args.descriptor
        local_image_file = get_url(url)
        if not local_image_file or not os.path.exists(local_image_file):
            vprint('cannot download from: ' + url)
            return False
        vprint(local_image_file + ': downloaded image from: ' + url)

    # VM images are compressed, but checksums are for uncompressed files
    compressed = ('compression' in metadata and metadata['compression'] and
                  metadata['compression'].lower() != 'none')
    if compressed:
        chext = '.' + metadata['compression']
        decomp = decompressor.Decompressor(local_image_file, ext=chext)
        res, local_image_file = decomp.doit(delete=(not args.keeptemps))
        if not res:
            vprint(local_image_file + ': cannot uncompress')
            return False
        vprint(local_image_file + ': uncompressed file')

    if image_type == 'image':
        base_name = os.path.basename(local_image_file)
    elif image_type == 'url':
        base_name = os.path.basename(urlsplit(url)[2])

    # Choose VM image name
    name = args.name
    if name is None:
        if image_type in ('image', 'url'):
            name, ext = os.path.splitext(base_name)
        elif image_type in ('xml', 'json', 'market', 'cern'):
            name = meta.get_name()
    vprint(local_image_file + ': VM image name: ' + name)

    # Populate metadata message digests to be verified, from checksum files
    if args.sums_files:
        if image_type in ('xml', 'json', 'market', 'cern'):
            raise NotImplementedError
        else:
            base_fn = base_name
        re_chks_line = re.compile(r'(?P<digest>[a-zA-Z0-9]+)\s+(?P<filename>.+)')
        for sum_file in args.sums_files:
            if sum_file.startswith(('http://', 'https://')):
                local_sum_file = get_url(sum_file)
                if not local_sum_file or not os.path.exists(local_sum_file):
                    vprint('cannot download from: ' + sum_file)
                    return False
                vprint(local_sum_file + ': downloaded checksum file from: ' +
                       sum_file)
                sum_file = local_sum_file
            with open(sum_file, 'rb') as sum_f:
                vprint(sum_file + ': loading checksums...')
                for line in sum_f:
                    match = re_chks_line.match(line)
                    if match and base_fn == match.group('filename'):
                        vprint(sum_file + ': matched filenames: ' + base_fn +
                               ' == ' + match.group('filename'))
                        ret = add_checksum(match.group('digest'), metadata,
                                           overrides=True)
                        if not ret:
                            vprint(sum_file + ': cannot add_checksum(' +
                                   match.group('digest') + ')')
                            return False

    # Populate metadata message digests to be verified, from CLI parameters
    if args.digests:
        digs = [dig for dig in args.digests.split(':') if dig]
        for dig in digs:
            ret = add_checksum(dig, metadata, overrides=True)
            if not ret:
                return False

    # Verify image size
    size_ok = True
    if 'bytes' in metadata:
        size_expected = int(metadata['bytes'])
        size_actual = os.path.getsize(local_image_file)
        size_ok = size_expected == size_actual
        if size_ok:
            vprint('%s: size: OK: %s' % (local_image_file, size_t(size_actual)))
        else:
            vprint('%s: size: expected: %d' % (local_image_file, size_expected))
            vprint('%s: size: actual: %d' % (local_image_file, size_actual))
            if not args.force:
                return False

    # Verify image checksums
    verified = len(metadata['checksums'])
    if not args.nocheck:
        verified = 0
        if size_ok:
            if len(metadata['checksums']) > 0:
                vprint(local_image_file + ': verifying checksums')
                verified = check_digests(local_image_file, metadata, args.force)
            elif image_type not in ('xml', 'json', 'market', 'cern'):
                vprint(local_image_file +
                       ': no checksum to verify (forgot "-s" CLI option?)')
            else:
                vprint(local_image_file +
                       ': no checksum to verify found in metadata...')
        else:
            if args.force:
                vprint(local_image_file +
                       ': sizes differ, but forcing the use of recomputed md5')
                metadata['checksums'] = {'md5': '0' * 32}
                check_digests(local_image_file, metadata, args.force)
            else:
                vprint(local_image_file + ': sizes differ, not verifying checksums')

    # If image already exists, download it to backup directory prior to deleting
    if not args.dryrun and glance.glance_exists(name):
        if args.backupdir:
            backupdir = args.backupdir
        else:
            backupdir = os.environ.get('GLANCING_BACKUP_DIR', '/tmp/glancing')
        do_backup = True
        if not os.path.exists(backupdir):
            os.mkdir(backupdir)
        elif not os.path.isdir(backupdir):
            vprint(backupdir + ' exists but is not a directory, sorry '
                   'cannot backup old images...')
            do_backup = False
        if do_backup:
            fn_local = os.path.join(backupdir, name)
            status = glance.glance_download(name, fn_local)
            if not status:
                return False
        glance.glance_delete(name, quiet=(not utils.get_verbose()))

    # Import image into glance
    if not args.dryrun:
        if (size_ok and len(metadata['checksums']) == verified) or args.force:
            vprint(local_image_file + ': importing into glance as "%s"' % str(name))
            md5 = metadata['checksums'].get('md5', None)
            ret = glance.glance_import(local_image_file, md5, name,
                                       metadata['format'])
            if not ret:
                return False
        else:
            return False
    else:
        if not args.force and (not size_ok or
                               not len(metadata['checksums']) == verified):
            return False

    # TODO: the following should be done even if something went wrong...

    # Remove the downloaded image file, unless asked to keep temporary files
    if not image_type == 'image' and not args.keeptemps:
        vprint(local_image_file + ': deleting temporary file')
        os.remove(local_image_file)

    # That's all folks !
    return True
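
# A minimal sketch of a command-line entry point, assuming this module is run
# directly (the project may already define its own elsewhere). main() returns
# True on success and False on failure, which maps onto conventional exit codes.
if __name__ == '__main__':
    sys.exit(0 if main() else 1)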