def tempdir(self):
    """Lazily create and return the per-instance temporary directory.

    The directory is created under ``self.system.tempdir`` (which is
    created first if necessary) and registered with ``self._resources``
    so it is cleaned up when the resource stack unwinds.  Subsequent
    calls return the same cached path.
    """
    if self._tempdir is not None:
        return self._tempdir
    # First call: make sure the parent directory exists, then create a
    # managed temporary directory inside it and cache the result.
    makedirs(self.system.tempdir)
    self._tempdir = self._resources.enter_context(
        temporary_directory(prefix='system-image-',
                            dir=self.system.tempdir))
    return self._tempdir
def setup_keyring_txz(keyring_src, signing_keyring, json_data, dst):
    """Build a signed <keyring>.tar.xz / .asc pair at the destination.

    A staging directory is populated with the source keyring (renamed to
    keyring.gpg) and a keyring.json file serialized from *json_data*.
    Both are packed into a .tar.xz archive, the archive is signed with
    *signing_keyring*, and the resulting tarball plus its detached
    signature are copied to *dst* and *dst*.asc respectively.

    :param keyring_src: Name of the source keyring (a .gpg file),
        relative to the test data directory; becomes keyring.gpg inside
        the tarball.
    :param signing_keyring: Name of the keyring used to sign the
        tarball, also relative to the test data directory.
    :param json_data: Dictionary written as the tarball's keyring.json.
    :param dst: Destination path for the .tar.xz file; the signature is
        placed next to it with an .asc suffix appended.
    """
    with temporary_directory() as staging:
        copy(keyring_src, staging, 'keyring.gpg')
        json_file = os.path.join(staging, 'keyring.json')
        with open(json_file, 'w', encoding='utf-8') as fp:
            json.dump(json_data, fp)
        # Pack the .gpg and .json files into the tarball and sign it.
        tarball = os.path.join(staging, 'keyring.tar.xz')
        with tarfile.open(tarball, 'w:xz') as tf:
            tf.add(os.path.join(staging, 'keyring.gpg'), 'keyring.gpg')
            tf.add(json_file, 'keyring.json')
        sign(tarball, signing_keyring)
        # Install the tarball and its signature under the path the https
        # server is vending them from.
        makedirs(os.path.dirname(dst))
        shutil.copy(tarball, dst)
        shutil.copy(tarball + '.asc', dst + '.asc')
def setup_index(index, todir, keyring, write_callback=None):
    """Create and sign every data file named by an index.

    For each file record in each image of the parsed *index*, the
    destination path is created under *todir* (stripping one leading
    slash from the record's path).  The file contents are either
    produced by *write_callback*, or default to the record's extensionless
    path with the slashes removed.  Every file is then signed with
    *keyring*.
    """
    for image in get_index(index).images:
        for filerec in image.files:
            relative = filerec.path
            if relative.startswith('/'):
                relative = relative[1:]
            target = os.path.join(todir, relative)
            makedirs(os.path.dirname(target))
            if write_callback is not None:
                write_callback(target)
            else:
                # Default contents: the path without its extension,
                # with the path separators squeezed out.
                stem = os.path.splitext(filerec.path)[0]
                with open(target, 'w', encoding='utf-8') as fp:
                    fp.write(EMPTYSTRING.join(stem.split('/')))
            # Sign with the specified signing key.
            sign(target, keyring)
def _move_files(self):
    """Stage the signing keyrings into the cache partition.

    The upgrader already has the archive-master, so we don't need to
    copy it.  The image-master, image-signing, and device-signing (if
    there is one) keys go to the cache partition.  They may already be
    there if they had to be downloaded, but if not, they're in /var/lib
    and now need to be copied to the cache partition.  The blacklist
    keyring, if there is one, should already exist in the data partition.
    """
    cache_dir = config.updater.cache_partition
    makedirs(cache_dir)
    # Each keyring consists of the .tar.xz archive plus its detached
    # .asc signature; copy whichever of the pair is not already there.
    for keyring_path in (config.gpg.image_master,
                         config.gpg.image_signing,
                         config.gpg.device_signing):
        _copy_if_missing(keyring_path, cache_dir)
        _copy_if_missing(keyring_path + '.asc', cache_dir)
    # Issue the reboot.
    self._next.append(self._prepare_recovery)
def instrument(config, stack, cert_file):
    """Instrument the system for testing."""
    # Make sure both destination partitions exist.
    makedirs(config.updater.cache_partition)
    makedirs(config.updater.data_partition)
    # Redirect the reboot subprocess call into a log file which the
    # testing parent process can open and read.
    reboot_log = _ActionLog('reboot.log')
    stack.enter_context(
        patch('systemimage.apply.check_call', reboot_log.write))
    stack.enter_context(
        patch('systemimage.device.check_output', return_value='nexus7'))
    # When PyCURL is available, teach the downloader to accept
    # self-signed certificates.
    if pycurl is not None:
        def self_sign(c):
            c.setopt(pycurl.CAINFO, cert_file)
        stack.enter_context(
            patch('systemimage.curl.make_testable', self_sign))
def _wrapper(self, function, ini_files, *args, **kws):
    """Run *function* against a freshly built config.d environment.

    The given ini files are copied (with temporary tmp/var directories
    interpolated in) into a new config.d directory, the global
    configuration and device-name detection are patched, and the cache
    and data partitions are created.  If *function*'s signature accepts
    ``config_d`` or ``config`` keyword arguments, they are supplied.

    :param function: The test method to invoke.
    :param ini_files: Sequence of ini resource names, copied into the
        config.d directory in order as NN_override.ini.
    :return: Whatever *function* returns.
    """
    start = 0
    # It would be preferable to simply add a device='nexus7' argument, but that
    # causes 'decorator() takes 1 positional argument but 2 were given'.
    # Pop (not get) the key so it is not forwarded to the wrapped function.
    device = kws.pop('device', 'nexus7')
    with ExitStack() as resources:
        # Create the config.d directory and copy all the source ini files to
        # this directory in sequential order, interpolating in the temporary
        # tmp and var directories.
        config_d = resources.enter_context(temporary_directory())
        temp_tmpdir = resources.enter_context(temporary_directory())
        temp_vardir = resources.enter_context(temporary_directory())
        for ini_file in ini_files:
            dst = os.path.join(config_d, '{:02d}_override.ini'.format(start))
            start += 1
            template = resource_bytes(
                'systemimage.tests.data', ini_file).decode('utf-8')
            with atomic(dst) as fp:
                print(template.format(tmpdir=temp_tmpdir,
                                      vardir=temp_vardir),
                      file=fp)
        # Patch the global configuration object so that it can be used
        # directly, which is good enough in most cases.  Also patch the bit of
        # code that detects the device name.
        config = Configuration(config_d)
        resources.enter_context(patch('systemimage.config._config', config))
        resources.enter_context(
            patch('systemimage.device.check_output', return_value=device))
        # Make sure the cache_partition and data_partition exist.
        makedirs(config.updater.cache_partition)
        makedirs(config.updater.data_partition)
        # The method under test is allowed to specify some additional
        # keyword arguments, in order to pass some variables in from the
        # wrapper.
        signature = inspect.signature(function)
        if 'config_d' in signature.parameters:
            kws['config_d'] = config_d
        if 'config' in signature.parameters:
            kws['config'] = config
        # BUG FIX: the keyword arguments assembled above (and any extras
        # supplied by the caller) were previously dropped because only
        # *args was forwarded; pass **kws through as well.
        return function(self, *args, **kws)
def main():
    """Entry point for the system-image-dbus service.

    Parses command line arguments, loads the configuration, claims the
    com.canonical.SystemImage bus name, and runs the D-Bus main loop.
    Returns 2 when another instance already owns the bus name.
    """
    # If enabled, start code coverage collection as early as possible.
    # Parse arguments.
    parser = argparse.ArgumentParser(
        prog='system-image-dbus',
        description='Ubuntu System Image Upgrader DBus service')
    parser.add_argument('--version', action='version',
                        version='system-image-dbus {}'.format(__version__))
    parser.add_argument('-C', '--config', default=DEFAULT_CONFIG_D,
                        action='store', metavar='DIRECTORY',
                        help="""Use the given configuration directory instead of the default""")
    parser.add_argument('-v', '--verbose', default=0, action='count',
                        help='Increase verbosity')
    # Hidden argument for special setup required by test environment.
    if instrument is not None:  # pragma: no branch
        parser.add_argument('--testing', default=None, action='store',
                            help=argparse.SUPPRESS)
        parser.add_argument('--self-signed-cert', default=None, action='store',
                            help=argparse.SUPPRESS)
    args = parser.parse_args(sys.argv[1:])
    try:
        config.load(args.config)
    except TypeError as error:
        # config.load() raises TypeError when the directory is missing.
        parser.error('\nConfiguration directory not found: {}'.format(error))
        assert 'parser.error() does not return'  # pragma: no cover
    # Create the temporary directory if it doesn't exist.
    makedirs(config.system.tempdir)
    # Initialize the loggers.
    initialize(verbosity=args.verbose)
    log = logging.getLogger('systemimage')
    DBusGMainLoop(set_as_default=True)
    system_bus = dbus.SystemBus()
    # Ensure we're the only owner of this bus name.
    code = system_bus.request_name('com.canonical.SystemImage',
                                   dbus.bus.NAME_FLAG_DO_NOT_QUEUE)
    if code == dbus.bus.REQUEST_NAME_REPLY_EXISTS:
        # Another instance already owns this name.  Exit.
        log.error('Cannot get exclusive ownership of bus name.')
        return 2
    # NOTE(review): the '{}' placeholders suggest a project-specific
    # brace-format logger rather than stdlib %-style — confirm against
    # the logging initialization code.
    log.info('SystemImage dbus main loop starting [{}/{}]',
             config.channel, config.device)
    with ExitStack() as stack:
        loop = Loop()
        testing_mode = getattr(args, 'testing', None)
        if testing_mode:
            # Test environment: instrument the system and use the test
            # service factory.
            instrument(config, stack, args.self_signed_cert)
            config.dbus_service = get_service(
                testing_mode, system_bus, '/Service', loop)
        else:
            from systemimage.dbus import Service
            config.dbus_service = Service(system_bus, '/Service', loop)
        try:
            loop.run()
        except KeyboardInterrupt:  # pragma: no cover
            log.info('SystemImage dbus main loop interrupted')
        except:  # pragma: no cover
            # Deliberately broad: log any loop failure before re-raising.
            log.exception('D-Bus loop exception')
            raise
        else:
            log.info('SystemImage dbus main loop exited')
def copy(filename, todir, dst=None):
    """Copy a test data file into a target directory.

    :param filename: Name of the source file, resolved via data_path().
    :param todir: Destination directory (any path-like; coerced to str).
    :param dst: Optional alternate name for the copied file; defaults to
        *filename*.
    """
    target_name = filename if dst is None else dst
    destination = os.path.join(str(todir), target_name)
    makedirs(os.path.dirname(destination))
    shutil.copy(data_path(filename), destination)
def main():
    """Entry point for the system-image-cli tool.

    Parses command line options, applies one-shot configuration
    overrides, handles the settings / info / reset / list-channels
    sub-behaviors, then either prints the upgrade path (--dry-run) or
    runs the update state machine to conclusion.  Returns a process
    exit code (0 on success, 1 on error).
    """
    parser = argparse.ArgumentParser(
        prog='system-image-cli',
        description='Ubuntu System Image Upgrader')
    parser.add_argument('--version', action='version',
                        version='system-image-cli {}'.format(__version__))
    parser.add_argument('-C', '--config', default=DEFAULT_CONFIG_D,
                        action='store', metavar='DIRECTORY',
                        help="""Use the given configuration directory instead of the default""")
    parser.add_argument('-b', '--build', default=None, action='store',
                        help="""Override the current build number just this once""")
    parser.add_argument('-c', '--channel', default=None, action='store',
                        help="""Override the channel just this once. Use in combination with `--build 0` to switch channels.""")
    parser.add_argument('-d', '--device', default=None, action='store',
                        help='Override the device name just this once')
    parser.add_argument('-f', '--filter', default=None, action='store',
                        help="""Filter the candidate paths to contain only full updates or only delta updates. The argument to this option must be either `full` or `delta`""")
    parser.add_argument('-m', '--maximage', default=None, type=int,
                        help="""After the winning upgrade path is selected, remove all images with version numbers greater than the given one. If no images remain in the winning path, the device is considered up-to-date.""")
    parser.add_argument('-g', '--no-apply', default=False,
                        action='store_true',
                        help="""Download (i.e. "get") all the data files and prepare for updating, but don't actually reboot the device into recovery to apply the update""")
    parser.add_argument('-i', '--info', default=False, action='store_true',
                        help="""Show some information about the current device, including the current build number, device name and channel, then exit""")
    parser.add_argument('-n', '--dry-run', default=False,
                        action='store_true',
                        help="""Calculate and print the upgrade path, but do not download or apply it""")
    parser.add_argument('-v', '--verbose', default=0, action='count',
                        help='Increase verbosity')
    parser.add_argument('--progress', default=[], action='append',
                        help="""Add a progress meter. Available meters are: dots, logfile, and json. Multiple --progress options are allowed.""")
    parser.add_argument('-p', '--percentage', default=None, action='store',
                        help="""Override the device's phased percentage value during upgrade candidate calculation.""")
    parser.add_argument('--list-channels', default=False,
                        action='store_true',
                        help="""List all available channels, then exit""")
    parser.add_argument('--factory-reset', default=False,
                        action='store_true',
                        help="""Perform a destructive factory reset and reboot. WARNING: this will wipe all user data on the device!""")
    parser.add_argument('--production-reset', default=False,
                        action='store_true',
                        help="""Perform a destructive production reset (similar to factory reset) and reboot. WARNING: this will wipe all user data on the device!""")
    parser.add_argument('--switch', default=None, action='store',
                        metavar='CHANNEL',
                        help="""Switch to the given channel. This is equivalent to `-c CHANNEL -b 0`.""")
    # Settings options.
    parser.add_argument('--show-settings', default=False,
                        action='store_true',
                        help="""Show all settings as key=value pairs, then exit""")
    parser.add_argument('--set', default=[], action='append',
                        metavar='KEY=VAL',
                        help="""Set a key and value in the settings, adding the key if it doesn't yet exist, or overriding its value if the key already exists. Multiple --set arguments can be given.""")
    parser.add_argument('--get', default=[], action='append',
                        metavar='KEY',
                        help="""Get the value for a key. If the key does not exist, a default value is returned. Multiple --get arguments can be given.""")
    parser.add_argument('--del', default=[], action='append',
                        metavar='KEY', dest='delete',
                        help="""Delete the key and its value. It is a no-op if the key does not exist. Multiple --del arguments can be given.""")
    parser.add_argument('--override-gsm', default=False,
                        action='store_true',
                        help="""When the device is set to only download over WiFi, but is currently on GSM, use this switch to temporarily override the update restriction. This switch has no effect when using the cURL based downloader.""")
    # Hidden system-image-cli only feature for testing purposes.  LP: #1333414
    parser.add_argument('--skip-gpg-verification', default=False,
                        action='store_true', help=argparse.SUPPRESS)
    args = parser.parse_args(sys.argv[1:])
    try:
        config.load(args.config)
    except (TypeError, FileNotFoundError):
        parser.error('\nConfiguration directory not found: {}'.format(
            args.config))
        assert 'parser.error() does not return'  # pragma: no cover
    if args.skip_gpg_verification:
        print("""\
WARNING: All GPG signature verifications have been disabled. Your upgrades are INSECURE.""", file=sys.stderr)
        config.skip_gpg_verification = True
    config.override_gsm = args.override_gsm
    # Perform factory and production resets.
    if args.factory_reset:
        factory_reset()
        # We should never get here, except possibly during the testing
        # process, so just return as normal.
        return 0
    if args.production_reset:
        production_reset()
        # We should never get here, except possibly during the testing
        # process, so just return as normal.
        return 0
    # Handle all settings arguments.  They are mutually exclusive.
    if sum(bool(arg) for arg in
           (args.set, args.get, args.delete, args.show_settings)) > 1:
        parser.error('Cannot mix and match settings arguments')
        assert 'parser.error() does not return'  # pragma: no cover
    if args.show_settings:
        rows = sorted(Settings())
        for row in rows:
            print('{}={}'.format(*row))
        return 0
    if args.get:
        settings = Settings()
        for key in args.get:
            print(settings.get(key))
        return 0
    if args.set:
        settings = Settings()
        for keyval in args.set:
            key, val = keyval.split('=', 1)
            settings.set(key, val)
        return 0
    if args.delete:
        settings = Settings()
        for key in args.delete:
            settings.delete(key)
        return 0
    # Sanity check -f/--filter.
    if args.filter is None:
        candidate_filter = None
    elif args.filter == 'full':
        candidate_filter = full_filter
    elif args.filter == 'delta':
        candidate_filter = delta_filter
    else:
        parser.error('Bad filter type: {}'.format(args.filter))
        assert 'parser.error() does not return'  # pragma: no cover
    # Create the temporary directory if it doesn't exist.
    makedirs(config.system.tempdir)
    # Initialize the loggers.
    initialize(verbosity=args.verbose)
    log = logging.getLogger('systemimage')
    # We assume the cache_partition already exists, as does the /etc directory
    # (i.e. where the archive master key lives).
    # Command line overrides.  Process --switch first since if both it and
    # -c/-b are given, the latter take precedence.
    if args.switch is not None:
        config.build_number = 0
        config.channel = args.switch
    if args.build is not None:
        try:
            config.build_number = int(args.build)
        except ValueError:
            parser.error('-b/--build requires an integer: {}'.format(
                args.build))
            assert 'parser.error() does not return'  # pragma: no cover
    if args.channel is not None:
        config.channel = args.channel
    if args.device is not None:
        config.device = args.device
    if args.percentage is not None:
        config.phase_override = args.percentage
    if args.info:
        # Print device/build/channel information and exit.
        alias = getattr(config.service, 'channel_target', None)
        kws = dict(
            build_number=config.build_number,
            device=config.device,
            channel=config.channel,
            last_update=last_update_date(),
            )
        if alias is None:
            template = """\
current build number: {build_number}
device name: {device}
channel: {channel}
last update: {last_update}"""
        else:
            template = """\
current build number: {build_number}
device name: {device}
channel: {channel}
alias: {alias}
last update: {last_update}"""
            kws['alias'] = alias
        print(dedent(template).format(**kws))
        # If there's additional version details, print this out now too.  We
        # sort the keys in reverse order because we want 'ubuntu' to generally
        # come first.
        details = version_detail()
        for key in sorted(details, reverse=True):
            print('version {}: {}'.format(key, details[key]))
        return 0
    DBusGMainLoop(set_as_default=True)
    if args.list_channels:
        state = State()
        try:
            state.run_thru('get_channel')
        except Exception:
            print(
                'Exception occurred during channel search; '
                'see log file for details', file=sys.stderr)
            log.exception('system-image-cli exception')
            return 1
        print('Available channels:')
        for key in sorted(state.channels):
            alias = state.channels[key].get('alias')
            if alias is None:
                print(' {}'.format(key))
            else:
                print(' {} (alias for: {})'.format(key, alias))
        return 0
    state = State()
    state.candidate_filter = candidate_filter
    if args.maximage is not None:
        state.winner_filter = version_filter(args.maximage)
    # Wire up the requested progress meters.
    for meter in args.progress:
        if meter == 'dots':
            state.downloader.callbacks.append(_DotsProgress().callback)
        elif meter == 'json':
            state.downloader.callbacks.append(_json_progress)
        elif meter == 'logfile':
            state.downloader.callbacks.append(_LogfileProgress(log).callback)
        else:
            parser.error('Unknown progress meter: {}'.format(meter))
            assert 'parser.error() does not return'  # pragma: no cover
    if args.dry_run:
        try:
            state.run_until('download_files')
        except Exception:
            print(
                'Exception occurred during dry-run; '
                'see log file for details', file=sys.stderr)
            log.exception('system-image-cli exception')
            return 1
        # Say -c <no-such-channel> was given.  This will fail.
        if state.winner is None or len(state.winner) == 0:
            print('Already up-to-date')
        else:
            winning_path = [str(image.version) for image in state.winner]
            kws = dict(path=COLON.join(winning_path))
            target_build = state.winner[-1].version
            if state.channel_switch is None:
                # We're not switching channels due to an alias change.
                template = 'Upgrade path is {path}'
                percentage = phased_percentage(config.channel, target_build)
            else:
                # This upgrade changes the channel that our alias is mapped
                # to, so include that information in the output.
                # ('from' is not a valid keyword argument name, but it IS a
                # valid str.format() field looked up via the **kws dict.)
                template = 'Upgrade path is {path} ({from} -> {to})'
                kws['from'], kws['to'] = state.channel_switch
                percentage = phased_percentage(kws['to'], target_build)
            print(template.format(**kws))
            print('Target phase: {}%'.format(percentage))
        return 0
    else:
        # Run the state machine to conclusion.  Suppress all exceptions, but
        # note that the state machine will log them.  If an exception occurs,
        # exit with a non-zero status.
        # NOTE(review): the '{}' placeholders suggest a project-specific
        # brace-format logger rather than stdlib %-style — confirm.
        log.info('running state machine [{}/{}]',
                 config.channel, config.device)
        try:
            if args.no_apply:
                state.run_until('apply')
            else:
                # Iterating the state machine drives it to completion.
                list(state)
        except KeyboardInterrupt:  # pragma: no cover
            return 0
        except Exception as error:
            print('Exception occurred during update; see log file for details',
                  file=sys.stderr)
            log.exception('system-image-cli exception')
            # This is a little bit of a hack because it's not generalized to
            # all values of --progress.  But OTOH, we always want to log the
            # error, so --progress=logfile is redundant, and --progress=dots
            # doesn't make much sense either.  Just just include some JSON
            # output if --progress=json was specified.
            if 'json' in args.progress:
                print(json.dumps(dict(type='error', msg=str(error))))
            return 1
        else:
            return 0
        finally:
            log.info('state machine finished')
def get_keyring(keyring_type, urls, sigkr, blacklist=None):
    """Download, verify, and unpack a keyring.

    The keyring .tar.xz file and its signature file are downloaded.  The
    signature is verified against the keys in the signature keyring gpg
    file.  If this fails, a SignatureError is raised and the files are
    deleted.

    If this succeeds, the tar.xz is unpacked, which should produce a
    keyring.gpg file containing the keyring, and a keyring.json file
    describing the keyring.  We load up the json file and verify that
    the keyring 'type' matches the type parameter and that the 'expiry'
    key, which names a UTC UNIX epoch timestamp, has not yet expired.
    Also, the 'model' key is checked - it is optional in the json file,
    and when it's missing, it means it applies to any model.

    If any of these condition occurred, a KeyringError is raised and the
    files are deleted.

    Assuming everything checks out, the .gpg file is copied to the cache
    location for the unpacked keyring, and the downloaded .tar.xz and
    .tar.xz.asc files are moved into place.  All the other ancillary
    files are deleted.

    :param keyring_type: The type of keyring file to download.  This can
        be one of 'archive-master', 'image-master', 'image-signing',
        'device-signing', or 'blacklist'.
    :param urls: Either a string naming the url to the source of the
        keyring .tar.xz file (in which case the url to the associated
        .asc file will be calculated), or a 2-tuple naming the .tar.xz
        and .tar.xz.asc files.
    :param sigkr: The local keyring file that should be used to verify
        the downloaded signature.
    :param blacklist: When given, this is the signature blacklist file.
    :raises SignatureError: when the keyring signature does not match.
    :raises KeyringError: when any of the other verifying attributes of
        the downloaded keyring fails.
    """
    # Calculate the urls to the .tar.xz and .asc files.
    if isinstance(urls, tuple):
        srcurl, ascurl = urls
    else:
        srcurl = urls
        ascurl = urls + '.asc'
    tarxz_src = urljoin(config.https_base, srcurl)
    ascxz_src = urljoin(config.https_base, ascurl)
    # Calculate the local paths to the temporary download files.  The
    # blacklist goes to the data partition and all the other files go to the
    # cache partition.
    dstdir = (config.updater.data_partition
              if keyring_type == 'blacklist'
              else config.updater.cache_partition)
    tarxz_dst = os.path.join(dstdir, 'keyring.tar.xz')
    ascxz_dst = tarxz_dst + '.asc'
    # Delete any files that were previously present.  The download manager
    # will raise an exception if it finds a file already there.
    safe_remove(tarxz_dst)
    safe_remove(ascxz_dst)
    with ExitStack() as stack:
        # Let FileNotFoundError percolate up.
        get_download_manager().get_files([
            (tarxz_src, tarxz_dst),
            (ascxz_src, ascxz_dst),
            ])
        # Registered callbacks run on stack unwind (LIFO), so the
        # downloaded temporaries are removed whether verification
        # succeeds or raises.
        stack.callback(os.remove, tarxz_dst)
        stack.callback(os.remove, ascxz_dst)
        signing_keyring = getattr(config.gpg, sigkr.replace('-', '_'))
        with Context(signing_keyring, blacklist=blacklist) as ctx:
            ctx.validate(ascxz_dst, tarxz_dst)
        # The signature is good, so now unpack the tarball, load the json file
        # and verify its contents.
        keyring_gpg = os.path.join(config.tempdir, 'keyring.gpg')
        keyring_json = os.path.join(config.tempdir, 'keyring.json')
        # NOTE(review): extractall() performs no member-path sanitization;
        # this appears safe only because the tarball's signature was just
        # verified above — confirm that assumption holds for all callers.
        with tarfile.open(tarxz_dst, 'r:xz') as tf:
            tf.extractall(config.tempdir)
        stack.callback(os.remove, keyring_gpg)
        stack.callback(os.remove, keyring_json)
        with open(keyring_json, 'r', encoding='utf-8') as fp:
            data = json.load(fp)
        # Check the mandatory keys first.
        json_type = data['type']
        if keyring_type != json_type:
            raise KeyringError(
                'keyring type mismatch; wanted: {}, got: {}'.format(
                    keyring_type, json_type))
        # Check the optional keys next.  A missing 'model' means the
        # keyring applies to any model.
        json_model = data.get('model')
        if json_model not in (config.device, None):
            raise KeyringError(
                'keyring model mismatch; wanted: {}, got: {}'.format(
                    config.device, json_model))
        expiry = data.get('expiry')
        if expiry is not None:
            # Get our current timestamp in UTC.
            timestamp = datetime.now(tz=timezone.utc).timestamp()
            if expiry < timestamp:
                # We've passed the expiration date for this keyring.
                raise KeyringError('expired keyring timestamp')
        # Everything checks out.  We now have the generic keyring.tar.xz and
        # keyring.tar.xz.asc files inside the cache (or data, in the case of
        # the blacklist) partition, which is where they need to be for
        # recovery.
        #
        # These files need to be renamed to their actual <keyring-type>.tar.xz
        # and .asc file names.
        #
        # We also want copies of these latter files to live in /var/lib so
        # that we don't have to download them again if we don't need to.
        if keyring_type == 'blacklist':
            tarxz_path = os.path.join(
                config.updater.data_partition, 'blacklist.tar.xz')
        else:
            tarxz_path = getattr(config.gpg, keyring_type.replace('-', '_'))
        ascxz_path = tarxz_path + '.asc'
        makedirs(os.path.dirname(tarxz_path))
        safe_remove(tarxz_path)
        safe_remove(ascxz_path)
        shutil.copy(tarxz_dst, tarxz_path)
        shutil.copy(ascxz_dst, ascxz_path)
        # For all keyrings, copy the extracted .gpg file to the tempdir.  We
        # will always fallback to this path to avoid unpacking the .tar.xz
        # file every single time.
        gpg_path = os.path.join(config.tempdir, keyring_type + '.gpg')
        shutil.copy(keyring_gpg, gpg_path)