Example #1
0
 def test_bad_signature_with_validate(self, config):
     # Same scenario as the previous test, but exercised through the
     # .validate() API rather than .verify().
     data_path = os.path.join(self._tmpdir, 'channels.json')
     signature_path = data_path + '.asc'
     copy('gpg.channels_01.json', self._tmpdir, dst=data_path)
     sign(data_path, 'device-signing.gpg')
     # Verify the signature with the pubkey.
     with temporary_directory() as workdir:
         keyring_path = os.path.join(workdir, 'image-signing.tar.xz')
         setup_keyring_txz('image-signing.gpg', 'image-master.gpg',
                           dict(type='image-signing'), keyring_path)
         with Context(keyring_path) as ctx:
             # validate() raises on mismatch instead of returning False.
             self.assertRaises(SignatureError, ctx.validate,
                               signature_path, data_path)
             # With verification skipped, validate() succeeds silently.
             config.skip_gpg_verification = True
             ctx.validate(signature_path, data_path)
Example #2
0
 def test_destination_image_master(self):
     # Downloading a keyring must leave both the .tar.xz and the
     # .tar.xz.asc files in place.
     setup_keyrings('archive-master')
     setup_keyring_txz(
         'image-master.gpg', 'archive-master.gpg',
         dict(type='image-master'),
         os.path.join(self._serverdir, 'gpg', 'image-master.tar.xz'))
     signature_path = config.gpg.image_master + '.asc'
     # Neither file may exist before the download...
     for path in (config.gpg.image_master, signature_path):
         self.assertFalse(os.path.exists(path))
     get_keyring('image-master', 'gpg/image-master.tar.xz',
                 'archive-master')
     # ...and both must exist afterward.
     for path in (config.gpg.image_master, signature_path):
         self.assertTrue(os.path.exists(path))
     # The preserved signature verifies against the archive master key.
     with Context(config.gpg.archive_master) as ctx:
         self.assertTrue(
             ctx.verify(signature_path, config.gpg.image_master))
Example #3
0
 def test_bad_signature_with_multiple_keyrings(self):
     # The file is signed with the image master key, so it must fail to
     # verify against the image signing and device signing pubkeys.
     channels_json = os.path.join(self._tmpdir, 'channels.json')
     copy('gpg.channels_01.json', self._tmpdir, dst=channels_json)
     sign(channels_json, 'image-master.gpg')
     # Verify the signature with the pubkey.
     with temporary_directory() as tmpdir:
         signing_txz = os.path.join(tmpdir, 'image-signing.tar.xz')
         device_txz = os.path.join(tmpdir, 'device-signing.tar.xz')
         setup_keyring_txz('image-signing.gpg', 'image-master.gpg',
                           dict(type='image-signing'), signing_txz)
         setup_keyring_txz('device-signing.gpg', 'image-signing.gpg',
                           dict(type='device-signing'), device_txz)
         with Context(signing_txz, device_txz) as ctx:
             verified = ctx.verify(channels_json + '.asc', channels_json)
             self.assertFalse(verified)
Example #4
0
 def test_signature_invalid_due_to_blacklist(self):
     # Like above, but we put the device signing key id in the blacklist.
     # Since the file is signed with the very key that is blacklisted,
     # validate() must raise a SignatureError that carries full
     # diagnostic details about the failure.
     channels_json = os.path.join(self._tmpdir, 'channels.json')
     copy('gpg.channels_01.json', self._tmpdir, dst=channels_json)
     sign(channels_json, 'device-signing.gpg')
     # Verify the signature with the pubkey.
     with temporary_directory() as tmpdir:
         keyring_1 = os.path.join(tmpdir, 'image-signing.tar.xz')
         keyring_2 = os.path.join(tmpdir, 'device-signing.tar.xz')
         blacklist = os.path.join(tmpdir, 'blacklist.tar.xz')
         setup_keyring_txz('image-signing.gpg', 'image-master.gpg',
                           dict(type='image-signing'), keyring_1)
         setup_keyring_txz('device-signing.gpg', 'image-signing.gpg',
                           dict(type='device-signing'), keyring_2)
         # We're letting the device signing pubkey stand in for a blacklist.
         setup_keyring_txz('device-signing.gpg', 'image-master.gpg',
                           dict(type='blacklist'), blacklist)
         # Get the keyring checksums now, because the files will get
         # deleted when the tmpdir context manager exits.
         keyring_checksums = []
         for path in (keyring_1, keyring_2):
             with open(path, 'rb') as fp:
                 checksum = hashlib.md5(fp.read()).hexdigest()
             keyring_checksums.append(checksum)
         with open(blacklist, 'rb') as fp:
             blacklist_checksum = hashlib.md5(fp.read()).hexdigest()
         with Context(keyring_1, keyring_2, blacklist=blacklist) as ctx:
             with self.assertRaises(SignatureError) as cm:
                 ctx.validate(channels_json + '.asc', channels_json)
     # Deliberately dedented: the keyring files are gone once the tmpdir
     # context exits above, so these assertions prove the SignatureError
     # captured everything it needed at raise time.
     error = cm.exception
     basename = os.path.basename
     self.assertEqual(basename(error.signature_path), 'channels.json.asc')
     self.assertEqual(basename(error.data_path), 'channels.json')
     # The contents of the signature file are not predictable.
     with open(channels_json + '.asc', 'rb') as fp:
         checksum = hashlib.md5(fp.read()).hexdigest()
     self.assertEqual(error.signature_checksum, checksum)
     self.assertEqual(
         error.data_checksum, '715c63fecbf44b62f9fa04a82dfa7d29')
     basenames = [basename(path) for path in error.keyrings]
     self.assertEqual(basenames, ['image-signing.tar.xz',
                                  'device-signing.tar.xz'])
     self.assertEqual(basename(error.blacklist), 'blacklist.tar.xz')
     self.assertEqual(error.keyring_checksums, keyring_checksums)
     self.assertEqual(error.blacklist_checksum, blacklist_checksum)
Example #5
0
 def test_destination_device_signing(self):
     # Downloading a keyring must leave both the .tar.xz and the
     # .tar.xz.asc files in place.
     setup_keyrings('archive-master', 'image-master', 'image-signing')
     setup_keyring_txz(
         'device-signing.gpg', 'image-signing.gpg',
         dict(type='device-signing'),
         os.path.join(self._serverdir, 'stable', 'nexus7',
                      'device-signing.tar.xz'))
     signature_path = config.gpg.device_signing + '.asc'
     # Neither file may exist before the download...
     for path in (config.gpg.device_signing, signature_path):
         self.assertFalse(os.path.exists(path))
     get_keyring('device-signing', 'stable/nexus7/device-signing.tar.xz',
                 'image-signing')
     # ...and both must exist afterward.
     for path in (config.gpg.device_signing, signature_path):
         self.assertTrue(os.path.exists(path))
     # The preserved signature verifies against the image signing key.
     with Context(config.gpg.image_signing) as ctx:
         self.assertTrue(
             ctx.verify(signature_path, config.gpg.device_signing))
Example #6
0
 def test_bad_signature(self, config):
     # The file is signed with the device key, so verification against
     # the image signing key fails -- unless the --skip-gpg-verification
     # flag is set.
     data_path = os.path.join(self._tmpdir, 'channels.json')
     signature_path = data_path + '.asc'
     copy('gpg.channels_01.json', self._tmpdir, dst=data_path)
     sign(data_path, 'device-signing.gpg')
     # Verify the signature with the pubkey.
     with temporary_directory() as workdir:
         keyring_path = os.path.join(workdir, 'image-signing.tar.xz')
         setup_keyring_txz('image-signing.gpg', 'image-master.gpg',
                           dict(type='image-signing'), keyring_path)
         with Context(keyring_path) as ctx:
             self.assertFalse(ctx.verify(signature_path, data_path))
             # Once the flag is set, the very same verify call succeeds.
             config.skip_gpg_verification = True
             self.assertTrue(ctx.verify(signature_path, data_path))
Example #7
0
 def test_signature_error_logging(self):
     # The repr/str of the SignatureError should contain lots of useful
     # information that will make debugging easier.
     channels_json = os.path.join(self._tmpdir, 'channels.json')
     copy('gpg.channels_01.json', self._tmpdir, dst=channels_json)
     sign(channels_json, 'device-signing.gpg')
     # Verify the signature with the pubkey.
     tmpdir = self._stack.enter_context(temporary_directory())
     dst = os.path.join(tmpdir, 'image-signing.tar.xz')
     setup_keyring_txz('image-signing.gpg', 'image-master.gpg',
                       dict(type='image-signing'), dst)
     output = StringIO()
     with Context(dst) as ctx:
         try:
             ctx.validate(channels_json + '.asc', channels_json)
         except SignatureError:
             # log.exception() is essentially a wrapper around this
             # traceback call.  We only care about the exception summary,
             # not the full stack trace, hence limit=0.
             traceback.print_exc(limit=0, file=output)
     # assertRegex() proved unreliable here (2014-02-12), so match each
     # output line against a table of per-line expectations instead.
     expectations = [
         ('equal', 'Traceback (most recent call last):'),
         ('equal', 'systemimage.gpg.SignatureError: '),
         ('startswith', '    sig path :'),
         ('endswith', '/channels.json.asc'),
         ('equal', '    data path: 715c63fecbf44b62f9fa04a82dfa7d29'),
         ('endswith', '/channels.json'),
         ('startswith', '    keyrings :'),
         ('endswith', "/image-signing.tar.xz']"),
         ('equal', '    blacklist: no blacklist '),
         ]
     lines = output.getvalue().splitlines()
     for line, (mode, expected) in zip(lines, expectations):
         if mode == 'equal':
             self.assertEqual(line, expected)
         elif mode == 'startswith':
             self.assertTrue(line.startswith(expected))
         else:
             self.assertTrue(line.endswith(expected))
Example #8
0
 def test_archive_master(self):
     # The archive master keyring holds exactly one key.  It is a
     # persistent, mandatory, shipped, non-expiring key.
     setup_keyrings()
     with Context(config.gpg.archive_master) as ctx:
         # There is only one key in the master keyring.
         self.assertEqual(
             ctx.fingerprints,
             {'289518ED3A0C4CFE975A0B32E0979A7EADE8E880'})
         self.assertEqual(ctx.key_ids, {'E0979A7EADE8E880'})
         # Sanity-check the metadata on the single master key.
         self.assertEqual(len(ctx.keys), 1)
         master = ctx.keys[0]
         self.assertEqual(
             master['uids'],
             ['Ubuntu Archive Master Signing Key (TEST) '
              '<*****@*****.**>'])
Example #9
0
 def test_bad_signature_in_blacklist(self):
     # As above, except the key that signed the file is itself in the
     # blacklist, so verification must fail.
     channels_json = os.path.join(self._tmpdir, 'channels.json')
     copy('gpg.channels_01.json', self._tmpdir, dst=channels_json)
     sign(channels_json, 'device-signing.gpg')
     # Verify the signature with the pubkey.
     with temporary_directory() as tmpdir:
         signing_txz = os.path.join(tmpdir, 'image-signing.tar.xz')
         device_txz = os.path.join(tmpdir, 'device-signing.tar.xz')
         blacklist_txz = os.path.join(tmpdir, 'blacklist.tar.xz')
         setup_keyring_txz('image-signing.gpg', 'image-master.gpg',
                           dict(type='image-signing'), signing_txz)
         setup_keyring_txz('device-signing.gpg', 'image-signing.gpg',
                           dict(type='device-signing'), device_txz)
         # The device signing pubkey stands in for a blacklist here.
         setup_keyring_txz('device-signing.gpg', 'image-master.gpg',
                           dict(type='blacklist'), blacklist_txz)
         with Context(signing_txz, device_txz,
                      blacklist=blacklist_txz) as ctx:
             verified = ctx.verify(channels_json + '.asc', channels_json)
             self.assertFalse(verified)
Example #10
0
 def test_archive_image_masters_image_device_signing(self):
     # In addition to the above, there is also a device signing key which
     # downloaded files can also be signed with.  This key is also
     # persistent, mandatory, and shipped.  It is optional, so doesn't need
     # to exist, but it is also updated regularly and expires after one
     # month.
     setup_keyrings()
     all_keyrings = (
         config.gpg.archive_master,
         config.gpg.image_master,
         config.gpg.image_signing,
         config.gpg.device_signing,
         )
     with Context(*all_keyrings) as ctx:
         # The context knows about all four keys.
         self.assertEqual(
             ctx.fingerprints,
             {'289518ED3A0C4CFE975A0B32E0979A7EADE8E880',
              '47691DEF271FB2B1FD3364513BC6AF1818E7F5FB',
              'C5E39F07D159687BA3E82BD15A0DE8A4F1F1846F',
              'C43D6575FDD935D2F9BC2A4669BC664FCB86D917'})
         self.assertEqual(
             ctx.key_ids,
             {'E0979A7EADE8E880',
              '3BC6AF1818E7F5FB',
              '5A0DE8A4F1F1846F',
              '69BC664FCB86D917'})
         # Collect every available uid, in key order.
         uids = [uid for key in ctx.keys for uid in key['uids']]
         self.assertEqual(uids, [
             'Ubuntu Archive Master Signing Key (TEST) '
                 '<*****@*****.**>',
             'Ubuntu System Image Master Signing Key (TEST) '
                 '<*****@*****.**>',
             'Ubuntu System Image Signing Key (TEST) '
                 '<*****@*****.**>',
             'Acme Phones, LLC Image Signing Key (TEST) '
                 '<*****@*****.**>',
             ])
Example #11
0
 def test_good_signature_not_in_blacklist(self):
     # We sign the file with the device signing key, and verify it against
     # the imaging signing and device signing keyrings.  In this case
     # though, we also have a blacklist keyring, but none of the keyids in
     # the blacklist match the keyid that the file was signed with.
     channels_json = os.path.join(self._tmpdir, 'channels.json')
     # Pass the full destination path, consistent with every sibling test;
     # the subsequent sign() call operates on exactly this path.
     copy('gpg.channels_01.json', self._tmpdir, dst=channels_json)
     sign(channels_json, 'device-signing.gpg')
     # Verify the signature with the pubkey.
     with temporary_directory() as tmpdir:
         keyring_1 = os.path.join(tmpdir, 'image-signing.tar.xz')
         keyring_2 = os.path.join(tmpdir, 'device-signing.tar.xz')
         blacklist = os.path.join(tmpdir, 'blacklist.tar.xz')
         setup_keyring_txz('image-signing.gpg', 'image-master.gpg',
                           dict(type='image-signing'), keyring_1)
         setup_keyring_txz('device-signing.gpg', 'image-signing.gpg',
                           dict(type='device-signing'), keyring_2)
         # The spare key stands in for a blacklist whose key ids do not
         # include the device signing key id.
         setup_keyring_txz('spare.gpg', 'image-master.gpg',
                           dict(type='blacklist'), blacklist)
         with Context(keyring_1, keyring_2, blacklist=blacklist) as ctx:
             self.assertTrue(
                 ctx.verify(channels_json + '.asc', channels_json))
Example #12
0
 def _download_files(self):
     """Download and verify all the winning upgrade path's files.

     Files already present in the cache partition with a valid signature
     are reused; everything else is downloaded, signature-checked, and
     checksum-checked.  On any verification failure the downloaded files
     are removed and self.files is cleared.  On success, queues
     self._move_files as the next state-machine step.
     """
     # If there is a device-signing key, the files can be signed by either
     # that or the image-signing key.
     keyrings = [config.gpg.image_signing]
     if os.path.exists(config.gpg.device_signing):
         keyrings.append(config.gpg.device_signing)
     # Now, go through all the file records in the winning upgrade path.
     # If the data file has already been downloaded and it has a valid
     # signature file, then we can save some bandwidth by not downloading
     # it again.
     downloads = []
     signatures = []
     checksums = []
     # For the clean ups below, preserve recovery's log files.
     cache_dir = config.updater.cache_partition
     preserve = set((
         os.path.join(cache_dir, 'log'),
         os.path.join(cache_dir, 'last_log'),
     ))
     for image_number, filerec in iter_path(self.winner):
         # Re-pack for arguments to get_files() and to collate the
         # signature path and checksum for the downloadable file.
         dst = os.path.join(cache_dir, os.path.basename(filerec.path))
         asc = os.path.join(cache_dir, os.path.basename(filerec.signature))
         checksum = filerec.checksum
         self.files.append((dst, (image_number, filerec.order)))
         self.files.append((asc, (image_number, filerec.order)))
         # Check the existence and signature of the file.
         if _use_cached(dst, asc, keyrings, checksum, self.blacklist):
             preserve.add(dst)
             preserve.add(asc)
         else:
             # Add the data file, which has a checksum.
             downloads.append(
                 Record(urljoin(config.http_base, filerec.path), dst,
                        checksum))
             # Add the signature file, which does not have a checksum.
             downloads.append(
                 Record(urljoin(config.http_base, filerec.signature), asc))
             signatures.append((dst, asc))
             checksums.append((dst, checksum))
     # For any files we're about to download, we must make sure that none
     # of the destination file paths exist, otherwise the downloader will
     # throw exceptions.
     for record in downloads:
         safe_remove(record.destination)
     # Also delete cache partition files that we no longer need.
     for filename in os.listdir(cache_dir):
         path = os.path.join(cache_dir, filename)
         if path not in preserve:
             safe_remove(os.path.join(cache_dir, filename))
     # Now, download all missing or ill-signed files, providing logging
     # feedback on progress.  This download can be paused.  The downloader
     # should also signal when the file downloads have started.
     self.downloader.get_files(downloads,
                               pausable=True,
                               signal_started=True)
     with ExitStack() as stack:
         # Set things up to remove the files if a SignatureError gets
         # raised or if the checksums don't match.  If everything's okay,
         # we'll clear the stack before the context manager exits so none
         # of the files will get removed.
         for record in downloads:
             stack.callback(os.remove, record.destination)
         # Although we should never get there, if the downloading step
         # fails, clear out the self.files list so there's no possibility
         # we'll try to move them later.
         stack.callback(setattr, self, 'files', [])
         # Verify the signatures on all the downloaded files.
         with Context(*keyrings, blacklist=self.blacklist) as ctx:
             for dst, asc in signatures:
                 ctx.validate(asc, dst)
         # Verify the checksums.
         for dst, checksum in checksums:
             with open(dst, 'rb') as fp:
                 got = calculate_signature(fp)
                 if got != checksum:
                     raise ChecksumError(dst, got, checksum)
         # Everything is fine so nothing needs to be cleared.
         stack.pop_all()
     log.info('all files available in {}', cache_dir)
     # Now, copy the files from the temporary directory into the location
     # for the upgrader.
     self._next.append(self._move_files)
Example #13
0
 def _get_channel(self, count):
     """Get and verify the channels.json file.

     :param count: Retry indicator for this step -- when greater than
         zero a signature failure is fatal (re-raised) instead of
         triggering a fresh image-signing-key download and retry.
         Presumably the number of prior attempts; confirm at call sites.
     """
     # If there is no image signing key, download one now.  Don't worry if
     # we have an out of date key; that will be handled elsewhere.  The
     # imaging signing must be signed by the image master key, which we
     # better already have an up-to-date copy of.
     image_signing = config.gpg.image_signing
     if not _use_cached_keyring(image_signing, image_signing + '.asc',
                                config.gpg.image_master):
         log.info('No valid image signing key found, downloading')
         get_keyring('image-signing', 'gpg/image-signing.tar.xz',
                     'image-master', self.blacklist)
     channels_url = urljoin(config.https_base, 'channels.json')
     channels_path = os.path.join(config.tempdir, 'channels.json')
     asc_url = urljoin(config.https_base, 'channels.json.asc')
     asc_path = os.path.join(config.tempdir, 'channels.json.asc')
     log.info('Looking for: {}', channels_url)
     with ExitStack() as stack:
         self.downloader.get_files([
             (channels_url, channels_path),
             (asc_url, asc_path),
         ])
         # Once we're done with them, we can remove these files.
         stack.callback(safe_remove, channels_path)
         stack.callback(safe_remove, asc_path)
         # The channels.json file must be signed with the SYSTEM IMAGE
         # SIGNING key.  There may or may not be a blacklist.
         ctx = stack.enter_context(
             Context(config.gpg.image_signing, blacklist=self.blacklist))
         try:
             ctx.validate(asc_path, channels_path)
         except SignatureError:
             # The signature on the channels.json file did not match.
             # Maybe there's a new image signing key on the server.  If
             # we've already downloaded a new image signing key, then
             # there's nothing more to do but raise an exception.
             # Otherwise, if a new key *is* found, retry the current step.
             if count > 0:
                 raise
             self._next.appendleft(self._get_signing_key)
             log.info('channels.json not properly signed')
             return
         # The signature was good.
         log.info('Local channels file: {}', channels_path)
         with open(channels_path, encoding='utf-8') as fp:
             self.channels = Channels.from_json(fp.read())
     # Locate the index file for the channel/device.  A missing channel
     # or device entry ends this step without queueing further work.
     try:
         channel = self.channels[config.channel]
     except KeyError:
         log.info('no matching channel: {}', config.channel)
         return
     log.info('got channel: {}', config.channel)
     try:
         device = channel.devices[config.device]
     except KeyError:
         log.info('no matching device: {}', config.device)
         return
     log.info('found channel/device entry: {}/{}', config.channel,
              config.device)
     # The next step will depend on whether there is a device keyring
     # available or not.  If there is, download and verify it now.
     keyring = getattr(device, 'keyring', None)
     if keyring:
         self._next.append(partial(self._get_device_keyring, keyring))
     self._next.append(partial(self._get_index, device.index))
Example #14
0
def get_keyring(keyring_type, urls, sigkr, blacklist=None):
    """Download, verify, and unpack a keyring.

    The keyring .tar.xz file and its signature file are downloaded.  The
    signature is verified against the keys in the signature keyring gpg
    file.  If this fails, a SignatureError is raised and the files are
    deleted.

    If this succeeds, the tar.xz is unpacked, which should produce a
    keyring.gpg file containing the keyring, and a keyring.json file
    describing the keyring.  We load up the json file and verify that
    the keyring 'type' matches the type parameter and that the 'expiry'
    key, which names a UTC UNIX epoch timestamp, has not yet expired.
    Also, the 'model' key is checked - it is optional in the json file,
    and when it's missing, it means it applies to any model.

    If any of these conditions fails, a KeyringError is raised and the
    files are deleted.

    Assuming everything checks out, the .gpg file is copied to the cache
    location for the unpacked keyring, and the downloaded .tar.xz and
    .tar.xz.asc files are moved into place.  All the other ancillary
    files are deleted.

    :param keyring_type: The type of keyring file to download.  This can be
        one of 'archive-master', 'image-master', 'image-signing',
        'device-signing', or 'blacklist'.
    :param urls: Either a string naming the url to the source of the keyring
        .tar.xz file (in which case the url to the associated .asc file will
        be calculated), or a 2-tuple naming the .tar.xz and .tar.xz.asc files.
    :param sigkr: The local keyring file that should be used to verify the
        downloaded signature.
    :param blacklist: When given, this is the signature blacklist file.
    :raises SignatureError: when the keyring signature does not match.
    :raises KeyringError: when any of the other verifying attributes of the
        downloaded keyring fails.
    """
    # Calculate the urls to the .tar.xz and .asc files.
    if isinstance(urls, tuple):
        srcurl, ascurl = urls
    else:
        srcurl = urls
        ascurl = urls + '.asc'
    tarxz_src = urljoin(config.https_base, srcurl)
    ascxz_src = urljoin(config.https_base, ascurl)
    # Calculate the local paths to the temporary download files.  The
    # blacklist goes to the data partition and all the other files go to the
    # cache partition.
    dstdir = (config.updater.data_partition if keyring_type == 'blacklist' else
              config.updater.cache_partition)
    tarxz_dst = os.path.join(dstdir, 'keyring.tar.xz')
    ascxz_dst = tarxz_dst + '.asc'
    # Delete any files that were previously present.  The download manager
    # will raise an exception if it finds a file already there.
    safe_remove(tarxz_dst)
    safe_remove(ascxz_dst)
    with ExitStack() as stack:
        # Let FileNotFoundError percolate up.
        get_download_manager().get_files([
            (tarxz_src, tarxz_dst),
            (ascxz_src, ascxz_dst),
        ])
        # The temporary downloads are always cleaned up, whether
        # verification succeeds or fails.
        stack.callback(os.remove, tarxz_dst)
        stack.callback(os.remove, ascxz_dst)
        # Map e.g. 'image-master' to the config.gpg.image_master path.
        signing_keyring = getattr(config.gpg, sigkr.replace('-', '_'))
        with Context(signing_keyring, blacklist=blacklist) as ctx:
            ctx.validate(ascxz_dst, tarxz_dst)
        # The signature is good, so now unpack the tarball, load the json file
        # and verify its contents.
        keyring_gpg = os.path.join(config.tempdir, 'keyring.gpg')
        keyring_json = os.path.join(config.tempdir, 'keyring.json')
        # NOTE(review): extractall() trusts the archive's member paths.
        # Mitigated here by the signature check above, but confirm members
        # cannot escape config.tempdir (or use an extraction filter).
        with tarfile.open(tarxz_dst, 'r:xz') as tf:
            tf.extractall(config.tempdir)
        stack.callback(os.remove, keyring_gpg)
        stack.callback(os.remove, keyring_json)
        with open(keyring_json, 'r', encoding='utf-8') as fp:
            data = json.load(fp)
        # Check the mandatory keys first.
        json_type = data['type']
        if keyring_type != json_type:
            raise KeyringError(
                'keyring type mismatch; wanted: {}, got: {}'.format(
                    keyring_type, json_type))
        # Check the optional keys next.  A missing 'model' applies to any
        # model, so None is acceptable.
        json_model = data.get('model')
        if json_model not in (config.device, None):
            raise KeyringError(
                'keyring model mismatch; wanted: {}, got: {}'.format(
                    config.device, json_model))
        expiry = data.get('expiry')
        if expiry is not None:
            # Get our current timestamp in UTC.
            timestamp = datetime.now(tz=timezone.utc).timestamp()
            if expiry < timestamp:
                # We've passed the expiration date for this keyring.
                raise KeyringError('expired keyring timestamp')
        # Everything checks out.  We now have the generic keyring.tar.xz and
        # keyring.tar.xz.asc files inside the cache (or data, in the case of
        # the blacklist) partition, which is where they need to be for
        # recovery.
        #
        # These files need to be renamed to their actual <keyring-type>.tar.xz
        # and .asc file names.
        #
        # We also want copies of these latter files to live in /var/lib so
        # that we don't have to download them again if we don't need to.
        if keyring_type == 'blacklist':
            tarxz_path = os.path.join(config.updater.data_partition,
                                      'blacklist.tar.xz')
        else:
            tarxz_path = getattr(config.gpg, keyring_type.replace('-', '_'))
        ascxz_path = tarxz_path + '.asc'
        makedirs(os.path.dirname(tarxz_path))
        safe_remove(tarxz_path)
        safe_remove(ascxz_path)
        shutil.copy(tarxz_dst, tarxz_path)
        shutil.copy(ascxz_dst, ascxz_path)
        # For all keyrings, copy the extracted .gpg file to the tempdir.  We
        # will always fallback to this path to avoid unpacking the .tar.xz
        # file every single time.
        gpg_path = os.path.join(config.tempdir, keyring_type + '.gpg')
        shutil.copy(keyring_gpg, gpg_path)