コード例 #1
0
 def test_always_downloads_gsm_allowed(self, config_d):
     # With auto_download set to 2 (always download), GSM downloads are
     # permitted.
     settings = Settings(Configuration(config_d))
     settings.set('auto_download', '2')
     get_download_manager().get_files(_http_pathify([
         ('channel.channels_05.json', 'channels.json'),
         ]))
     # Both sides of the GSM flag were exercised.
     self.assertTrue(self._gsm_set_flag)
     self.assertTrue(self._gsm_get_flag)
コード例 #2
0
 def test_wifi_downloads_gsm_disallowed(self, config_d):
     # With auto_download set to 1 (automatic, wifi-only), GSM downloads
     # are obviously not allowed.
     settings = Settings(Configuration(config_d))
     settings.set('auto_download', '1')
     get_download_manager().get_files(_http_pathify([
         ('channel.channels_05.json', 'channels.json'),
         ]))
     # Neither side of the GSM flag was touched.
     self.assertFalse(self._gsm_set_flag)
     self.assertFalse(self._gsm_get_flag)
コード例 #3
0
 def test_manual_downloads_gsm_allowed(self, config_d):
     # With auto_download set to 0, downloads are manual; assuming the
     # user knows what they're doing, GSM downloads are permitted.
     settings = Settings(Configuration(config_d))
     settings.set('auto_download', '0')
     get_download_manager().get_files(_http_pathify([
         ('channel.channels_05.json', 'channels.json'),
         ]))
     # Both sides of the GSM flag were exercised.
     self.assertTrue(self._gsm_set_flag)
     self.assertTrue(self._gsm_get_flag)
コード例 #4
0
 def test_get_downloader_forced_curl(self):
     # Setting the SYSTEMIMAGE_PYCURL environment variable to 1, yes, or
     # true (case-insensitively) forces the PyCURL downloader.
     for setting in ('1', 'tRuE', 'YES'):
         with reset_envar('SYSTEMIMAGE_PYCURL'):
             os.environ['SYSTEMIMAGE_PYCURL'] = setting
             self.assertIsInstance(
                 get_download_manager(), CurlDownloadManager)
コード例 #5
0
 def test_good_path(self):
     # The HTTPS server presents a valid self-signed certificate, so the
     # download over https succeeds.
     with make_http_server(self._directory, 8943, 'cert.pem', 'key.pem'):
         get_download_manager().get_files(_https_pathify([
             ('channel.channels_05.json', 'channels.json'),
             ]))
         self.assertEqual(set(os.listdir(config.tempdir)),
                          {'channels.json'})
コード例 #6
0
 def test_get_downloader_forced_udm(self):
     # Setting the SYSTEMIMAGE_PYCURL environment variable to anything
     # else forces the udm downloader.
     for setting in ('0', 'false', 'nope'):
         with reset_envar('SYSTEMIMAGE_PYCURL'):
             os.environ['SYSTEMIMAGE_PYCURL'] = setting
             self.assertIsInstance(
                 get_download_manager(), UDMDownloadManager)
コード例 #7
0
 def test_mismatched_checksums(self):
     # Two download records naming the same destination but different
     # checksums must be rejected with a DuplicateDestinationError that
     # describes both offending records.
     content = b'x' * 100
     checksum = sha256(content).hexdigest()
     # Publish the source file on the test server.
     with open(os.path.join(self._serverdir, 'source.dat'), 'wb') as fp:
         fp.write(content)
     downloader = get_download_manager()
     url = urljoin(config.http_base, 'source.dat')
     downloads = [
         Record(url, 'local.dat', checksum),
         # Rotate the checksum's last character to the front so the two
         # records disagree.
         Record(url, 'local.dat', checksum[-1] + checksum[:-1]),
         ]
     with self.assertRaises(DuplicateDestinationError) as cm:
         downloader.get_files(downloads)
     # Exactly one destination collided, and both records name the same
     # source url.
     self.assertEqual(len(cm.exception.duplicates), 1)
     dst, dupes = cm.exception.duplicates[0]
     self.assertEqual(os.path.basename(dst), 'local.dat')
     self.assertEqual([r[0] for r in dupes],
                      ['http://localhost:8980/source.dat',
                       'http://localhost:8980/source.dat'])
     # The records in the exception carry no guaranteed order, so sort
     # the checksums before comparing.
     self.assertEqual(
         sorted(r[2] for r in dupes),
         ['09ecb6ebc8bcefc733f6f2ec44f791abeed6a99edf0cc31519637898aebd52d8'
          ,
          '809ecb6ebc8bcefc733f6f2ec44f791abeed6a99edf0cc31519637898aebd52d'
          ])
     # Nothing was actually downloaded.
     self.assertEqual(os.listdir(config.tempdir), [])
コード例 #8
0
 def test_cancel(self):
     # Cancel the download of some big files while it is in flight.
     self.assertEqual(os.listdir(config.tempdir), [])
     with ExitStack() as resources:
         serverdir = resources.enter_context(temporary_directory())
         resources.push(make_http_server(serverdir, 8980))
         # Serve a couple of big files.
         for filename in ('bigfile_1.dat', 'bigfile_2.dat'):
             write_bytes(os.path.join(serverdir, filename), 10)
         downloader = get_download_manager()
         # The download service offers no reliable cancel granularity, so
         # patch the reactor's 'started' handler to cancel immediately.
         def immediate_cancel(self, signal, path, started):
             if started:
                 downloader.cancel()
         resources.enter_context(patch(
             'systemimage.udm.DownloadReactor._do_started',
             immediate_cancel))
         self.assertRaises(
             Canceled, downloader.get_files, _http_pathify([
                 ('bigfile_1.dat', 'bigfile_1.dat'),
                 ('bigfile_2.dat', 'bigfile_2.dat'),
                 ]))
         # The cancellation left nothing behind.
         self.assertEqual(os.listdir(config.tempdir), [])
コード例 #9
0
 def test_https_cert_not_in_capath(self):
     # The server's self-signed certificate is not in the system's CA
     # path (i.e. no known-good CA), so the https download must fail.
     with make_http_server(self._directory, 8943, 'cert.pem', 'key.pem'):
         with self.assertRaises(FileNotFoundError):
             get_download_manager().get_files(_https_pathify([
                 ('channel.channels_05.json', 'channels.json'),
                 ]))
コード例 #10
0
 def test_auto_detect_udm(self):
     # When the environment variable is unset, auto-detection kicks in.
     # For backward compatibility, udm wins whenever it is available on
     # the system bus.
     with reset_envar('SYSTEMIMAGE_PYCURL'):
         os.environ.pop('SYSTEMIMAGE_PYCURL', None)
         with patch('dbus.SystemBus.get_object') as mock:
             manager = get_download_manager()
             self.assertIsInstance(manager, UDMDownloadManager)
         mock.assert_called_once_with(DOWNLOADER_INTERFACE, '/')
コード例 #11
0
 def test_bad_host(self):
     # The HTTPS server's certificate names the wrong host (mocked so
     # that its CA is in the system's trusted path), so the download
     # must fail.
     with make_http_server(
             self._directory, 8943, 'nasty_cert.pem', 'nasty_key.pem'):
         with self.assertRaises(FileNotFoundError):
             get_download_manager().get_files(_https_pathify([
                 ('channel.channels_05.json', 'channels.json'),
                 ]))
コード例 #12
0
 def test_expired(self):
     # The HTTPS server's certificate has expired (mocked so that its CA
     # is in the system's trusted path), so the download must fail.
     with make_http_server(
             self._directory, 8943, 'expired_cert.pem', 'expired_key.pem'):
         with self.assertRaises(FileNotFoundError):
             get_download_manager().get_files(_https_pathify([
                 ('channel.channels_05.json', 'channels.json'),
                 ]))
コード例 #13
0
 def test_http_masquerades_as_https(self):
     # A plain HTTP server is listening on the https port.  Because it
     # never wraps its socket in an SSL context, it isn't really an
     # https server, and downloads over https URLs must fail.
     with make_http_server(self._directory, 8943):
         with self.assertRaises(FileNotFoundError):
             get_download_manager().get_files(_https_pathify([
                 ('channel.channels_05.json', 'channels.json'),
                 ]))
コード例 #14
0
 def test_auto_detect_curl(self):
     # When the environment variable is unset, auto-detection kicks in.
     # If udm is not available on the system bus, the cURL downloader is
     # chosen instead.
     import systemimage.download
     with reset_envar('SYSTEMIMAGE_PYCURL'):
         os.environ.pop('SYSTEMIMAGE_PYCURL', None)
         with patch('dbus.SystemBus.get_object',
                    side_effect=DBusException) as mock:
             with patch.object(systemimage.download, 'pycurl', object()):
                 self.assertIsInstance(
                     get_download_manager(), CurlDownloadManager)
         mock.assert_called_once_with(DOWNLOADER_INTERFACE, '/')
コード例 #15
0
 def test_matched_duplicates(self):
     # Duplicate destinations are fine as long as both the source urls
     # and the checksums agree.
     content = b'x' * 100
     checksum = sha256(content).hexdigest()
     # Publish the source file on the test server.
     with open(os.path.join(self._serverdir, 'source.dat'), 'wb') as fp:
         fp.write(content)
     records = [
         Record(url, dst, checksum)
         for url, dst in _http_pathify([('source.dat', 'local.dat'),
                                        ('source.dat', 'local.dat'),
                                        ])
         ]
     get_download_manager().get_files(records)
     self.assertEqual(os.listdir(config.tempdir), ['local.dat'])
コード例 #16
0
 def __init__(self):
     """Initialize the state machine's bookkeeping attributes.

     The cleanup step is queued up as the first state transition.
     """
     # Variables which manage state transitions.
     self._next = deque()
     self._debug_step = 1
     self.candidate_filter = None
     self.winner_filter = None
     # Variables which represent things we've learned.
     self.blacklist = None
     self.channels = None
     self.index = None
     self.winner = None
     self.files = []
     self.channel_switch = None
     # Other public attributes.
     self.downloader = get_download_manager()
     # The deque starts empty, so this cleanup callable is the first
     # transition the machine will run.
     self._next.append(self._cleanup)
コード例 #17
0
 def test_download_404(self):
     # One member of a group download does not exist on the server, so
     # the whole group fails and none of the files end up in the
     # destination.
     self.assertEqual(os.listdir(config.tempdir), [])
     with ExitStack() as resources:
         serverdir = resources.enter_context(temporary_directory())
         resources.push(make_http_server(serverdir, 8980))
         # Publish a few big files.
         for filename in ('bigfile_1.dat', 'bigfile_2.dat',
                          'bigfile_3.dat'):
             write_bytes(os.path.join(serverdir, filename), 10)
         requests = _http_pathify([
             ('bigfile_1.dat', 'bigfile_1.dat'),
             ('bigfile_2.dat', 'bigfile_2.dat'),
             ('bigfile_3.dat', 'bigfile_3.dat'),
             ('missing.txt', 'missing.txt'),
             ])
         with self.assertRaises(FileNotFoundError):
             get_download_manager().get_files(requests)
         # The temporary directory is still empty.
         self.assertEqual(os.listdir(config.tempdir), [])
コード例 #18
0
 def test_mismatched_urls(self):
     # Two download records naming the same destination but different
     # source urls must be rejected.
     content = b'x' * 100
     checksum = sha256(content).hexdigest()
     # Publish two identical source files under different names.
     for name in ('source1.dat', 'source2.dat'):
         with open(os.path.join(self._serverdir, name), 'wb') as fp:
             fp.write(content)
     records = [
         Record(url, dst, checksum)
         for url, dst in _http_pathify([('source1.dat', 'local.dat'),
                                        ('source2.dat', 'local.dat'),
                                        ])
         ]
     with self.assertRaises(DuplicateDestinationError) as cm:
         get_download_manager().get_files(records)
     # Exactly one destination collided, and the exception names both
     # source urls.
     self.assertEqual(len(cm.exception.duplicates), 1)
     dst, dupes = cm.exception.duplicates[0]
     self.assertEqual(os.path.basename(dst), 'local.dat')
     self.assertEqual([record[0] for record in dupes],
                      ['http://localhost:8980/source1.dat',
                       'http://localhost:8980/source2.dat'])
     self.assertEqual(os.listdir(config.tempdir), [])
コード例 #19
0
    def test_duplicate_error_message(self):
        # When a duplicate destination error occurs, an error message gets
        # logged.  Make sure the pretty-printed rendering of the colliding
        # records is helpful.
        content = b'x' * 100
        checksum = sha256(content).hexdigest()
        # Publish the source file on the test server.
        with open(os.path.join(self._serverdir, 'source.dat'), 'wb') as fp:
            fp.write(content)
        downloader = get_download_manager()
        url = urljoin(config.http_base, 'source.dat')
        downloads = [
            Record(url, 'local.dat', checksum),
            # Rotate the checksum's last character to the front so the two
            # records disagree.
            Record(url, 'local.dat', checksum[-1] + checksum[:-1]),
            ]
        with self.assertRaises(DuplicateDestinationError) as cm:
            downloader.get_files(downloads)
        # The expected text is sensitive to exact whitespace; do not
        # reindent the literal below.
        self.assertMultiLineEqual(str(cm.exception), """
[   (   'local.dat',
        [   (   'http://localhost:8980/source.dat',
                'local.dat',
                '09ecb6ebc8bcefc733f6f2ec44f791abeed6a99edf0cc31519637898aebd52d8'),
            (   'http://localhost:8980/source.dat',
                'local.dat',
                '809ecb6ebc8bcefc733f6f2ec44f791abeed6a99edf0cc31519637898aebd52d')])]""")
コード例 #20
0
ファイル: keyring.py プロジェクト: ubports/system-image
def get_keyring(keyring_type, urls, sigkr, blacklist=None):
    """Download, verify, and unpack a keyring.

    The keyring .tar.xz file and its signature file are downloaded.  The
    signature is verified against the keys in the signature keyring gpg
    file.  If this fails, a SignatureError is raised and the files are
    deleted.

    If this succeeds, the tar.xz is unpacked, which should produce a
    keyring.gpg file containing the keyring, and a keyring.json file
    describing the keyring.  We load up the json file and verify that
    the keyring 'type' matches the type parameter and that the 'expiry'
    key, which names a UTC UNIX epoch timestamp, has not yet expired.
    Also, the 'model' key is checked - it is optional in the json file,
    and when it's missing, it means it applies to any model.

    If any of these conditions fail, a KeyringError is raised and the
    files are deleted.

    Assuming everything checks out, the .gpg file is copied to the cache
    location for the unpacked keyring, and the downloaded .tar.xz and
    .tar.xz.asc files are moved into place.  All the other ancillary
    files are deleted.

    :param keyring_type: The type of keyring file to download.  This can be
        one of 'archive-master', 'image-master', 'image-signing',
        'device-signing', or 'blacklist'.
    :param urls: Either a string naming the url to the source of the keyring
        .tar.xz file (in which case the url to the associated .asc file will
        be calculated), or a 2-tuple naming the .tar.xz and .tar.xz.asc files.
    :param sigkr: The local keyring file that should be used to verify the
        downloaded signature.
    :param blacklist: When given, this is the signature blacklist file.
    :raises SignatureError: when the keyring signature does not match.
    :raises KeyringError: when any of the other verifying attributes of the
        downloaded keyring fails.
    """
    # Calculate the urls to the .tar.xz and .asc files.
    if isinstance(urls, tuple):
        srcurl, ascurl = urls
    else:
        srcurl = urls
        ascurl = urls + '.asc'
    tarxz_src = urljoin(config.https_base, srcurl)
    ascxz_src = urljoin(config.https_base, ascurl)
    # Calculate the local paths to the temporary download files.  The
    # blacklist goes to the data partition and all the other files go to the
    # cache partition.
    dstdir = (config.updater.data_partition if keyring_type == 'blacklist' else
              config.updater.cache_partition)
    tarxz_dst = os.path.join(dstdir, 'keyring.tar.xz')
    ascxz_dst = tarxz_dst + '.asc'
    # Delete any files that were previously present.  The download manager
    # will raise an exception if it finds a file already there.
    safe_remove(tarxz_dst)
    safe_remove(ascxz_dst)
    with ExitStack() as stack:
        # Let FileNotFoundError percolate up.  Note that the removal
        # callbacks are registered only after a successful download, so a
        # download failure leaves nothing to clean up.
        get_download_manager().get_files([
            (tarxz_src, tarxz_dst),
            (ascxz_src, ascxz_dst),
        ])
        stack.callback(os.remove, tarxz_dst)
        stack.callback(os.remove, ascxz_dst)
        # e.g. 'image-signing' -> config.gpg.image_signing
        signing_keyring = getattr(config.gpg, sigkr.replace('-', '_'))
        with Context(signing_keyring, blacklist=blacklist) as ctx:
            ctx.validate(ascxz_dst, tarxz_dst)
        # The signature is good, so now unpack the tarball, load the json file
        # and verify its contents.
        keyring_gpg = os.path.join(config.tempdir, 'keyring.gpg')
        keyring_json = os.path.join(config.tempdir, 'keyring.json')
        # NOTE(review): extractall() performs no member-path sanitization
        # here; the tarball's signature has already been validated above,
        # but consider tarfile's extraction filters for defense in depth.
        with tarfile.open(tarxz_dst, 'r:xz') as tf:
            tf.extractall(config.tempdir)
        stack.callback(os.remove, keyring_gpg)
        stack.callback(os.remove, keyring_json)
        with open(keyring_json, 'r', encoding='utf-8') as fp:
            data = json.load(fp)
        # Check the mandatory keys first.
        json_type = data['type']
        if keyring_type != json_type:
            raise KeyringError(
                'keyring type mismatch; wanted: {}, got: {}'.format(
                    keyring_type, json_type))
        # Check the optional keys next.  A missing 'model' means the keyring
        # applies to every model.
        json_model = data.get('model')
        if json_model not in (config.device, None):
            raise KeyringError(
                'keyring model mismatch; wanted: {}, got: {}'.format(
                    config.device, json_model))
        expiry = data.get('expiry')
        if expiry is not None:
            # Get our current timestamp in UTC.
            timestamp = datetime.now(tz=timezone.utc).timestamp()
            if expiry < timestamp:
                # We've passed the expiration date for this keyring.
                raise KeyringError('expired keyring timestamp')
        # Everything checks out.  We now have the generic keyring.tar.xz and
        # keyring.tar.xz.asc files inside the cache (or data, in the case of
        # the blacklist) partition, which is where they need to be for
        # recovery.
        #
        # These files need to be renamed to their actual <keyring-type>.tar.xz
        # and .asc file names.
        #
        # We also want copies of these latter files to live in /var/lib so
        # that we don't have to download them again if we don't need to.
        if keyring_type == 'blacklist':
            tarxz_path = os.path.join(config.updater.data_partition,
                                      'blacklist.tar.xz')
        else:
            tarxz_path = getattr(config.gpg, keyring_type.replace('-', '_'))
        ascxz_path = tarxz_path + '.asc'
        makedirs(os.path.dirname(tarxz_path))
        safe_remove(tarxz_path)
        safe_remove(ascxz_path)
        shutil.copy(tarxz_dst, tarxz_path)
        shutil.copy(ascxz_dst, ascxz_path)
        # For all keyrings, copy the extracted .gpg file to the tempdir.  We
        # will always fallback to this path to avoid unpacking the .tar.xz
        # file every single time.
        gpg_path = os.path.join(config.tempdir, keyring_type + '.gpg')
        shutil.copy(keyring_gpg, gpg_path)
コード例 #21
0
 def _downloader(self, *args):
     """Return a download manager, forwarding any positional arguments."""
     manager = get_download_manager(*args)
     return manager