def _get_blacklist_2(self):
    """Second try to get the blacklist."""
    # This time around, a SignatureError is fatal, so let it percolate
    # up.  A FileNotFoundError is still possible in the small window
    # between the first attempt and this one (the blacklist could have
    # been removed in the meantime), and since catching it costs little,
    # be thorough.
    #
    # The first attempt already refreshed the image master key if one
    # was missing, so there's no need to do that again here.
    blacklist_url = 'gpg/blacklist.tar.xz'
    try:
        log.info('Looking for blacklist again: {}',
                 urljoin(config.https_base, blacklist_url))
        get_keyring('blacklist', blacklist_url, 'image-master')
    except FileNotFoundError:
        log.info('No blacklist found on second attempt')
    else:
        # A successful download leaves blacklist.tar.xz in the data
        # partition.
        self.blacklist = os.path.join(
            config.updater.data_partition, 'blacklist.tar.xz')
        log.info('Local blacklist file: {}', self.blacklist)
    # First attempt at fetching the channels.json file.
    self._next.append(partial(self._get_channel, 0))
def test_bad_signature(self):
    # Both files are downloaded, but the signature does not match the
    # image-master key.
    setup_keyrings()
    # Use the spare key as the blacklist, signed by itself.  Since this
    # won't match the image-master key, the check will fail.
    server_path = os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz')
    setup_keyring_txz('spare.gpg', 'spare.gpg', dict(type='blacklist'),
                      server_path)
    with self.assertRaises(SignatureError) as cm:
        get_keyring('blacklist', 'gpg/blacklist.tar.xz', 'image-master')
    error = cm.exception
    # The local file name will be keyring.tar.xz in the cache directory.
    basename = os.path.basename
    self.assertEqual(basename(error.data_path), 'keyring.tar.xz')
    self.assertEqual(basename(error.signature_path), 'keyring.tar.xz.asc')
    # The crafted blacklist.tar.xz file will have an unpredictable
    # checksum due to tarfile variability.
    with open(server_path, 'rb') as fp:
        checksum = hashlib.md5(fp.read()).hexdigest()
    self.assertEqual(error.data_checksum, checksum)
    # The signature file's checksum is also unpredictable.
    with open(server_path + '.asc', 'rb') as fp:
        checksum = hashlib.md5(fp.read()).hexdigest()
    self.assertEqual(error.signature_checksum, checksum)
def test_good_path(self):
    # Everything checks out, with the simplest possible keyring.json.
    setup_keyrings('archive-master')
    server_path = os.path.join(
        self._serverdir, 'gpg', 'image-master.tar.xz')
    setup_keyring_txz('spare.gpg', 'archive-master.gpg',
                      dict(type='image-master'), server_path)
    get_keyring(
        'image-master', 'gpg/image-master.tar.xz', 'archive-master')
    expected = set(['289518ED3A0C4CFE975A0B32E0979A7EADE8E880'])
    with Context(config.gpg.archive_master) as ctx:
        self.assertEqual(ctx.fingerprints, expected)
def test_path_blacklist(self):
    # Get the blacklist keyring.
    setup_keyrings('archive-master', 'image-master')
    setup_keyring_txz(
        'spare.gpg', 'image-master.gpg', dict(type='blacklist'),
        os.path.join(self._serverdir, 'gpg/blacklist.tar.xz'))
    # The blacklist lives at a fixed URL; unlike the device signing
    # keyring it does not vary by channel or device, so no .format()
    # substitution is needed here.
    url = 'gpg/blacklist.tar.xz'
    get_keyring('blacklist', url, 'image-master')
    blacklist_path = os.path.join(config.tempdir, 'blacklist.tar.xz')
    with Context(blacklist_path) as ctx:
        self.assertEqual(ctx.fingerprints,
                         set(['94BE2CECF8A5AF9F3A10E2A6526B7016C3D2FB44']))
def test_bad_json_type(self):
    # This time, while the signatures match, the keyring type in the
    # keyring.json file does not match.
    setup_keyrings()
    setup_keyring_txz(
        'device-signing.gpg', 'image-master.gpg', dict(type='master'),
        os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz'))
    with self.assertRaises(KeyringError) as cm:
        get_keyring('blacklist', 'gpg/blacklist.tar.xz', 'image-master')
    self.assertEqual(
        cm.exception.message,
        'keyring type mismatch; wanted: blacklist, got: master')
def test_bad_json_model(self):
    # Like the type-mismatch case, but here the keyring.json names a
    # model other than the one we're running on.
    setup_keyrings()
    server_path = os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz')
    setup_keyring_txz('device-signing.gpg', 'image-master.gpg',
                      dict(type='blacklist', model='nexus0'),
                      server_path)
    with self.assertRaises(KeyringError) as caught:
        get_keyring('blacklist', 'gpg/blacklist.tar.xz', 'image-master')
    self.assertEqual(
        caught.exception.message,
        'keyring model mismatch; wanted: nexus7, got: nexus0')
def test_good_path_model(self):
    # Everything checks out when the keyring.json also names our model.
    setup_keyrings()
    server_path = os.path.join(
        self._serverdir, 'gpg', 'image-master.tar.xz')
    setup_keyring_txz('spare.gpg', 'archive-master.gpg',
                      dict(type='image-master', model='nexus7'),
                      server_path)
    get_keyring(
        'image-master', 'gpg/image-master.tar.xz', 'archive-master')
    expected = set(['289518ED3A0C4CFE975A0B32E0979A7EADE8E880'])
    with Context(config.gpg.archive_master) as ctx:
        self.assertEqual(ctx.fingerprints, expected)
def test_path_device_signing_keyring(self):
    # Download and verify the per-channel/device signing keyring.
    setup_keyrings('archive-master', 'image-master', 'image-signing')
    server_path = os.path.join(self._serverdir, 'gpg', 'stable', 'nexus7',
                               'device-signing.tar.xz')
    setup_keyring_txz('spare.gpg', 'image-signing.gpg',
                      dict(type='device-signing'), server_path)
    url = 'gpg/{}/{}/device-signing.tar.xz'.format(
        config.channel, config.device)
    get_keyring('device-signing', url, 'image-signing')
    with Context(config.gpg.device_signing) as ctx:
        self.assertEqual(
            ctx.fingerprints,
            set(['94BE2CECF8A5AF9F3A10E2A6526B7016C3D2FB44']))
def test_good_path_expiry(self):
    # Everything checks out when the keyring carries an expiration
    # timestamp that is still in the future.
    next_year = datetime.now(tz=timezone.utc) + timedelta(days=365)
    setup_keyrings('archive-master')
    server_path = os.path.join(
        self._serverdir, 'gpg', 'image-master.tar.xz')
    setup_keyring_txz('spare.gpg', 'archive-master.gpg',
                      dict(type='image-master',
                           expiry=next_year.timestamp()),
                      server_path)
    get_keyring(
        'image-master', 'gpg/image-master.tar.xz', 'archive-master')
    expected = set(['289518ED3A0C4CFE975A0B32E0979A7EADE8E880'])
    with Context(config.gpg.archive_master) as ctx:
        self.assertEqual(ctx.fingerprints, expected)
def test_expired(self):
    # Similar to above, but the expiry key in the json names a UTC
    # timestamp that has already passed, so the keyring is rejected.
    last_year = datetime.now(tz=timezone.utc) - timedelta(days=365)
    setup_keyrings()
    server_path = os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz')
    setup_keyring_txz('device-signing.gpg', 'image-master.gpg',
                      dict(type='blacklist',
                           model='nexus7',
                           expiry=last_year.timestamp()),
                      server_path)
    with self.assertRaises(KeyringError) as caught:
        get_keyring('blacklist', 'gpg/blacklist.tar.xz', 'image-master')
    self.assertEqual(caught.exception.message,
                     'expired keyring timestamp')
def test_destination_blacklist(self):
    # The downloaded blacklist's .tar.xz and .tar.xz.asc files are
    # preserved in the data partition, as the assertions below verify.
    setup_keyrings('archive-master', 'image-master')
    setup_keyring_txz(
        'spare.gpg', 'image-master.gpg', dict(type='blacklist'),
        os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz'))
    txz_path = os.path.join(
        config.updater.data_partition, 'blacklist.tar.xz')
    asc_path = txz_path + '.asc'
    # Neither file exists before the download.
    self.assertFalse(os.path.exists(txz_path))
    self.assertFalse(os.path.exists(asc_path))
    get_keyring('blacklist', 'gpg/blacklist.tar.xz', 'image-master')
    # Both files exist after the download, and the signature verifies
    # against the image master key.
    self.assertTrue(os.path.exists(txz_path))
    self.assertTrue(os.path.exists(asc_path))
    with Context(config.gpg.image_master) as ctx:
        self.assertTrue(ctx.verify(asc_path, txz_path))
def test_destination_image_signing(self):
    # When a keyring is downloaded, we preserve its .tar.xz and
    # .tar.xz.asc files.
    setup_keyrings('archive-master', 'image-master')
    setup_keyring_txz(
        'image-signing.gpg', 'image-master.gpg',
        dict(type='image-signing'),
        os.path.join(self._serverdir, 'gpg', 'image-signing.tar.xz'))
    txz_path = config.gpg.image_signing
    asc_path = txz_path + '.asc'
    # Neither file exists before the download...
    for path in (txz_path, asc_path):
        self.assertFalse(os.path.exists(path))
    get_keyring(
        'image-signing', 'gpg/image-signing.tar.xz', 'image-master')
    # ...and both exist afterward.
    for path in (txz_path, asc_path):
        self.assertTrue(os.path.exists(path))
    with Context(config.gpg.image_master) as ctx:
        self.assertTrue(ctx.verify(asc_path, txz_path))
def _get_signing_key(self):
    """Try to get and validate a new image signing key.

    If there isn't one, throw a SignatureError.
    """
    try:
        # The image signing key must be signed by the image master.
        get_keyring('image-signing', 'gpg/image-signing.tar.xz',
                    'image-master', self.blacklist)
    except (FileNotFoundError, SignatureError, KeyringError):
        # No valid image signing key could be found.  Don't chain this
        # exception.
        log.error('No valid image signing key found')
        raise
    # Retry the previous step.  Passing count=1 signals to
    # _get_channel() that a signature failure this time is an error.
    self._next.appendleft(partial(self._get_channel, 1))
def _get_master_key(self):
    """Try to get and validate a new image master key.

    If there isn't one, throw a SignatureError.
    """
    try:
        log.info('Getting the image master key')
        # The image master key must be signed by the archive master.
        get_keyring('image-master', 'gpg/image-master.tar.xz',
                    'archive-master', self.blacklist)
    except (FileNotFoundError, SignatureError, KeyringError):
        # No valid image master key could be found.
        log.error('No valid image master key found')
        raise
    # Retry the previous step (the second blacklist fetch) now that a
    # new image master key is in place.
    log.info('Installing new image master key to: {}',
             config.gpg.image_master)
    self._next.appendleft(self._get_blacklist_2)
def _get_blacklist_1(self): """First try to get the blacklist.""" # If there is no image master key, or if the signature on the key is # not valid, download one now. Don't worry if we have an out of date # key; that will be handled elsewhere. The archive master key better # be pre-installed (we cannot download it). Let any exceptions in # grabbing the image master key percolate up. image_master = config.gpg.image_master if not _use_cached_keyring(image_master, image_master + '.asc', config.gpg.archive_master): log.info('No valid image master key found, downloading') get_keyring('image-master', 'gpg/image-master.tar.xz', 'archive-master') # The only way to know whether there is a blacklist or not is to try # to download it. If it fails, there isn't one. url = 'gpg/blacklist.tar.xz' try: # I think it makes no sense to check the blacklist when we're # downloading a blacklist file. log.info('Looking for blacklist: {}'.format( urljoin(config.https_base, url))) get_keyring('blacklist', url, 'image-master') except SignatureError: log.exception('No signed blacklist found') # The blacklist wasn't signed by the system image master. Maybe # there's a new system image master key? Let's find out. self._next.appendleft(self._get_master_key) return except FileNotFoundError: # There is no blacklist. log.info('No blacklist found') else: # After successful download, the blacklist.tar.xz will be living # in the data partition. self.blacklist = os.path.join(config.updater.data_partition, 'blacklist.tar.xz') log.info('Local blacklist file: {}', self.blacklist) # This is the first time we're trying to get the channel.json file. # If this fails because signature is invalid, we'll try to download a # new image-signing key. Then we'll call _get_channel() again. self._next.append(partial(self._get_channel, 0))
def _get_channel(self, count):
    """Get and verify the channels.json file.

    :param count: Retry counter.  On the first attempt (count=0), a
        signature failure queues a new image signing key download and a
        retry; on the retry (count=1), a signature failure is fatal.
    """
    # If there is no image signing key, download one now.  Don't worry if
    # we have an out of date key; that will be handled elsewhere.  The
    # image signing key must be signed by the image master key, which we
    # better already have an up-to-date copy of.
    image_signing = config.gpg.image_signing
    if not _use_cached_keyring(image_signing, image_signing + '.asc',
                               config.gpg.image_master):
        log.info('No valid image signing key found, downloading')
        get_keyring('image-signing', 'gpg/image-signing.tar.xz',
                    'image-master', self.blacklist)
    channels_url = urljoin(config.https_base, 'channels.json')
    channels_path = os.path.join(config.tempdir, 'channels.json')
    asc_url = urljoin(config.https_base, 'channels.json.asc')
    asc_path = os.path.join(config.tempdir, 'channels.json.asc')
    log.info('Looking for: {}', channels_url)
    with ExitStack() as stack:
        self.downloader.get_files([
            (channels_url, channels_path),
            (asc_url, asc_path),
            ])
        # Once we're done with them, we can remove these files.  The
        # callbacks run when the ExitStack unwinds, i.e. after the
        # channels data has been read below.
        stack.callback(safe_remove, channels_path)
        stack.callback(safe_remove, asc_path)
        # The channels.json file must be signed with the SYSTEM IMAGE
        # SIGNING key.  There may or may not be a blacklist.
        ctx = stack.enter_context(
            Context(config.gpg.image_signing, blacklist=self.blacklist))
        try:
            ctx.validate(asc_path, channels_path)
        except SignatureError:
            # The signature on the channels.json file did not match.
            # Maybe there's a new image signing key on the server.  If
            # we've already downloaded a new image signing key, then
            # there's nothing more to do but raise an exception.
            # Otherwise, if a new key *is* found, retry the current step.
            if count > 0:
                raise
            self._next.appendleft(self._get_signing_key)
            log.info('channels.json not properly signed')
            return
        # The signature was good.
        log.info('Local channels file: {}', channels_path)
        # Parse the channel listing before the stack callbacks delete
        # the downloaded file.
        with open(channels_path, encoding='utf-8') as fp:
            self.channels = Channels.from_json(fp.read())
    # Locate the index file for the channel/device.
    try:
        channel = self.channels[config.channel]
    except KeyError:
        log.info('no matching channel: {}', config.channel)
        return
    log.info('got channel: {}', config.channel)
    try:
        device = channel.devices[config.device]
    except KeyError:
        log.info('no matching device: {}', config.device)
        return
    log.info('found channel/device entry: {}/{}',
             config.channel, config.device)
    # The next step will depend on whether there is a device keyring
    # available or not.  If there is, download and verify it now.
    keyring = getattr(device, 'keyring', None)
    if keyring:
        self._next.append(partial(self._get_device_keyring, keyring))
    self._next.append(partial(self._get_index, device.index))
def _get_device_keyring(self, keyring):
    """Download and verify the channel/device signing keyring.

    The keyring and signature URLs come from the channel listing entry;
    verification is done against the image-signing key, honoring any
    previously downloaded blacklist.
    """
    txz_url = urljoin(config.https_base, keyring.path)
    sig_url = urljoin(config.https_base, keyring.signature)
    log.info('getting device keyring: {}', txz_url)
    get_keyring(
        'device-signing', (txz_url, sig_url), 'image-signing',
        self.blacklist)