예제 #1
0
def inner_context():
    # Exercise the three ExitStack registration APIs — a full context
    # manager, a raw __exit__-style callback, and a plain callback with
    # positional and keyword arguments — then hand control to the caller.
    with ExitStack() as stack:
        stack.enter_context(null_context())
        stack.push(exit_cb)
        stack.callback(other_cb, 10, "hi", answer=42)
        yield
예제 #2
0
    def test_instance_bypass(self):
        # __exit__ is looked up on the *type*, not the instance, so an
        # instance-level attribute is rejected by enter_context() while
        # push() still records the object verbatim as a plain callback.
        class Example(object):
            pass

        obj = Example()
        obj.__exit__ = object()
        exit_stack = ExitStack()
        with self.assertRaises(AttributeError):
            exit_stack.enter_context(obj)
        exit_stack.push(obj)
        self.assertIs(list(exit_stack._exit_callbacks)[-1], obj)
예제 #3
0
    def test_instance_bypass(self):
        # An object whose type lacks __exit__ cannot be entered as a
        # context manager, even when the instance itself carries an
        # __exit__ attribute; push() nevertheless accepts it as-is.
        class Example(object):
            pass

        candidate = Example()
        candidate.__exit__ = object()
        stack = ExitStack()
        self.assertRaises(AttributeError, stack.enter_context, candidate)
        stack.push(candidate)
        callbacks = tuple(stack._exit_callbacks)
        self.assertIs(callbacks[-1], candidate)
예제 #4
0
    def _exit(self, exc_type, exc_val, exc_tb, daemon=False):
        """Tear down client, signal handler and loop via an ExitStack.

        Callbacks run LIFO: the client is exited first, the loop last,
        with the in-flight exception info forwarded to each.
        """
        stack = ExitStack()

        def exit_loop(exc_type, exc_val, exc_tb):
            # Runs last: tear down the loop once the client has closed.
            if self.client.is_closed:
                return self._loop.__exit__(exc_type, exc_val, exc_tb)

        stack.push(exit_loop)

        if threading.current_thread() is threading.main_thread():
            # TODO the main thread is not necessarily the last thread to finish.
            # Should the signal handler be removed in case it isn't?
            stack.push(self._signal_ctx)

        def exit_client(exc_type, exc_val, exc_tb):
            # Runs first: exit the client with the given daemon-ness,
            # maybe leading the client to close.
            return self._call(self.client._aexit(exc_type, exc_val, exc_tb, daemon=daemon))

        stack.push(exit_client)

        return stack.__exit__(exc_type, exc_val, exc_tb)
예제 #5
0
class ScienceBeamParserBaseSession:
    """Base session holding per-session resources for a ScienceBeamParser.

    Owns an ExitStack so that any resources acquired during the session
    (currently the lazily created temporary directory) are released on
    close() / __exit__().
    """

    def __init__(
        self,
        parser: 'ScienceBeamParser',
        temp_dir: Optional[str] = None,
        fulltext_processor_config: Optional[FullTextProcessorConfig] = None,
        document_request_parameters: Optional[DocumentRequestParameters] = None
    ):
        self.parser = parser
        self.exit_stack = ExitStack()
        self._temp_dir: Optional[str] = temp_dir
        # Fall back to the parser-wide defaults when not overridden.
        if fulltext_processor_config is None:
            fulltext_processor_config = parser.fulltext_processor_config
        self.fulltext_processor_config = fulltext_processor_config
        if document_request_parameters is None:
            document_request_parameters = DocumentRequestParameters()
        self.document_request_parameters = document_request_parameters

    def __enter__(self) -> 'ScienceBeamParserBaseSession':
        return self

    def close(self):
        """Release all resources owned by this session."""
        self.exit_stack.close()

    def __exit__(self, exc, value, tb):
        self.close()

    @property
    def temp_dir(self) -> str:
        """Lazily created temporary directory, removed when the session closes."""
        if not self._temp_dir:
            # enter_context() (rather than push() followed by a manual
            # __enter__()) only registers the directory for cleanup once it
            # has actually been created, avoiding a spurious exit callback
            # if TemporaryDirectory.__enter__ were to fail.
            self._temp_dir = self.exit_stack.enter_context(
                TemporaryDirectory(suffix='-sb-parser'))
        return self._temp_dir

    @property
    def temp_path(self) -> Path:
        """pathlib view of temp_dir (creating the directory on first access)."""
        return Path(self.temp_dir)
예제 #6
0
class Scope:
    """
    A context manager that allows to register error and exit callbacks.

    Callbacks are executed in LIFO order when the scope exits (error
    callbacks only fire when the scope is left with an exception).  A
    Scope can also be installed as the thread-local "current" scope via
    as_current().
    """

    # Shared across all Scope instances; holds the per-thread "current" scope.
    _thread_locals = threading.local()

    @frozen
    class _ExitHandler:
        # Adapter wrapping a plain zero-argument callable so it can be
        # pushed onto an ExitStack (stack.push() only needs __exit__).
        callback: Callable[[], Any]
        ignore_errors: bool = True

        def __exit__(self, exc_type, exc_value, exc_traceback):
            # Invoke the callback; swallow its errors unless told otherwise.
            try:
                self.callback()
            except Exception:
                if not self.ignore_errors:
                    raise

    @frozen
    class _ErrorHandler(_ExitHandler):
        # Like _ExitHandler, but only fires when the scope is exited
        # because of an exception.
        def __exit__(self, exc_type, exc_value, exc_traceback):
            if exc_type:
                return super().__exit__(exc_type=exc_type,
                                        exc_value=exc_value,
                                        exc_traceback=exc_traceback)

    def __init__(self):
        self._stack = ExitStack()
        self.enabled = True  # when False, __exit__ skips all callbacks

    def on_error_do(self,
                    callback: Callable,
                    *args,
                    kwargs: Optional[Dict[str, Any]] = None,
                    ignore_errors: bool = False):
        """
        Registers a function to be called on scope exit because of an error.

        If ignore_errors is True, the errors from this function call
        will be ignored.
        """

        self._register_callback(self._ErrorHandler,
                                ignore_errors=ignore_errors,
                                callback=callback,
                                args=args,
                                kwargs=kwargs)

    def on_exit_do(self,
                   callback: Callable,
                   *args,
                   kwargs: Optional[Dict[str, Any]] = None,
                   ignore_errors: bool = False):
        """
        Registers a function to be called on scope exit.

        If ignore_errors is True, the errors from this function call
        will be ignored.
        """

        self._register_callback(self._ExitHandler,
                                ignore_errors=ignore_errors,
                                callback=callback,
                                args=args,
                                kwargs=kwargs)

    def _register_callback(self,
                           handler_type,
                           callback: Callable,
                           args: Optional[Tuple[Any, ...]] = None,
                           kwargs: Optional[Dict[str, Any]] = None,
                           ignore_errors: bool = False):
        # Bind positional/keyword arguments up front so the handler can
        # invoke the callback with no arguments at exit time.
        if args or kwargs:
            callback = partial(callback, *args, **(kwargs or {}))

        self._stack.push(handler_type(callback, ignore_errors=ignore_errors))

    def add(self, cm: ContextManager[T]) -> T:
        """
        Enters a context manager and adds it to the exit stack.

        Returns: cm.__enter__() result
        """

        return self._stack.enter_context(cm)

    def enable(self):
        """Allows the registered callbacks to run on exit."""
        self.enabled = True

    def disable(self):
        """Prevents the registered callbacks from running on exit."""
        self.enabled = False

    def close(self):
        """Runs the registered exit callbacks (unless disabled)."""
        self.__exit__(None, None, None)

    def __enter__(self) -> Scope:
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        if not self.enabled:
            return

        self._stack.__exit__(exc_type, exc_value, exc_traceback)
        self._stack.pop_all()  # prevent issues on repetitive calls

    @classmethod
    def current(cls) -> Scope:
        """Returns the thread's current scope.

        Raises AttributeError if no scope has been made current via
        as_current().
        """
        return cls._thread_locals.current

    @contextmanager
    def as_current(self):
        """Temporarily installs this scope as the thread's current one."""
        previous = getattr(self._thread_locals, 'current', None)
        self._thread_locals.current = self
        try:
            yield
        finally:
            self._thread_locals.current = previous
예제 #7
0
class TestKeyring(unittest.TestCase):
    """Test downloading and unpacking a keyring."""
    @classmethod
    def setUpClass(cls):
        # Use `cls` — this is a classmethod (consistent with the other
        # test classes in this file).
        SystemImagePlugin.controller.set_mode(cert_pem='cert.pem')

    def setUp(self):
        # Vend keyring files over HTTPS out of a temporary directory;
        # unwind anything already set up if server start-up fails part way.
        self._stack = ExitStack()
        try:
            self._serverdir = self._stack.enter_context(temporary_directory())
            self._stack.push(
                make_http_server(self._serverdir, 8943, 'cert.pem', 'key.pem'))
        except:
            self._stack.close()
            raise

    def tearDown(self):
        self._stack.close()

    @configuration
    def test_good_path(self):
        # Everything checks out, with the simplest possible keyring.json.
        setup_keyrings('archive-master')
        setup_keyring_txz(
            'spare.gpg', 'archive-master.gpg', dict(type='image-master'),
            os.path.join(self._serverdir, 'gpg', 'image-master.tar.xz'))
        get_keyring('image-master', 'gpg/image-master.tar.xz',
                    'archive-master')
        with Context(config.gpg.archive_master) as ctx:
            self.assertEqual(ctx.fingerprints,
                             set(['289518ED3A0C4CFE975A0B32E0979A7EADE8E880']))

    @configuration
    def test_good_path_full_json(self):
        # Everything checks out, with a fully loaded keyring.json file.
        next_year = datetime.now(tz=timezone.utc) + timedelta(days=365)
        setup_keyrings('archive-master')
        setup_keyring_txz(
            'spare.gpg', 'archive-master.gpg',
            dict(type='image-master',
                 expiry=next_year.timestamp(),
                 model='nexus7'),
            os.path.join(self._serverdir, 'gpg', 'image-master.tar.xz'))
        get_keyring('image-master', 'gpg/image-master.tar.xz',
                    'archive-master')
        with Context(config.gpg.archive_master) as ctx:
            self.assertEqual(ctx.fingerprints,
                             set(['289518ED3A0C4CFE975A0B32E0979A7EADE8E880']))

    @configuration
    def test_good_path_model(self):
        # Everything checks out with the model specified.
        setup_keyrings()
        setup_keyring_txz(
            'spare.gpg', 'archive-master.gpg',
            dict(type='image-master', model='nexus7'),
            os.path.join(self._serverdir, 'gpg', 'image-master.tar.xz'))
        get_keyring('image-master', 'gpg/image-master.tar.xz',
                    'archive-master')
        with Context(config.gpg.archive_master) as ctx:
            self.assertEqual(ctx.fingerprints,
                             set(['289518ED3A0C4CFE975A0B32E0979A7EADE8E880']))

    @configuration
    def test_good_path_expiry(self):
        # Everything checks out, with the expiration date specified.
        next_year = datetime.now(tz=timezone.utc) + timedelta(days=365)
        setup_keyrings('archive-master')
        setup_keyring_txz(
            'spare.gpg', 'archive-master.gpg',
            dict(type='image-master', expiry=next_year.timestamp()),
            os.path.join(self._serverdir, 'gpg', 'image-master.tar.xz'))
        get_keyring('image-master', 'gpg/image-master.tar.xz',
                    'archive-master')
        with Context(config.gpg.archive_master) as ctx:
            self.assertEqual(ctx.fingerprints,
                             set(['289518ED3A0C4CFE975A0B32E0979A7EADE8E880']))

    @configuration
    def test_path_device_signing_keyring(self):
        # Get the device signing keyring.
        setup_keyrings('archive-master', 'image-master', 'image-signing')
        setup_keyring_txz(
            'spare.gpg', 'image-signing.gpg', dict(type='device-signing'),
            os.path.join(self._serverdir, 'gpg', 'stable', 'nexus7',
                         'device-signing.tar.xz'))
        url = 'gpg/{}/{}/device-signing.tar.xz'.format(config.channel,
                                                       config.device)
        get_keyring('device-signing', url, 'image-signing')
        with Context(config.gpg.device_signing) as ctx:
            self.assertEqual(ctx.fingerprints,
                             set(['94BE2CECF8A5AF9F3A10E2A6526B7016C3D2FB44']))

    @configuration
    def test_path_blacklist(self):
        # Get the blacklist keyring.
        setup_keyrings('archive-master', 'image-master')
        setup_keyring_txz(
            'spare.gpg', 'image-master.gpg', dict(type='blacklist'),
            os.path.join(self._serverdir, 'gpg/blacklist.tar.xz'))
        # The blacklist URL is fixed; it does not vary by channel/device
        # (the previous .format() call here had no placeholders to fill).
        url = 'gpg/blacklist.tar.xz'
        get_keyring('blacklist', url, 'image-master')
        blacklist_path = os.path.join(config.tempdir, 'blacklist.tar.xz')
        with Context(blacklist_path) as ctx:
            self.assertEqual(ctx.fingerprints,
                             set(['94BE2CECF8A5AF9F3A10E2A6526B7016C3D2FB44']))

    @configuration
    def test_tar_xz_file_missing(self):
        # If the tar.xz file cannot be downloaded, an error is raised.
        tarxz_path = os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz')
        setup_keyrings()
        setup_keyring_txz('spare.gpg', 'archive-master.gpg',
                          dict(type='blacklist'), tarxz_path)
        os.remove(tarxz_path)
        self.assertRaises(FileNotFoundError, get_keyring, 'blacklist',
                          'gpg/blacklist.tar.xz', 'image-master')

    @configuration
    def test_asc_file_missing(self):
        # If the tar.xz.asc file cannot be downloaded, an error is raised.
        tarxz_path = os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz')
        setup_keyrings()
        setup_keyring_txz('spare.gpg', 'archive-master.gpg',
                          dict(type='blacklist'), tarxz_path)
        os.remove(tarxz_path + '.asc')
        self.assertRaises(FileNotFoundError, get_keyring, 'blacklist',
                          'gpg/blacklist.tar.xz', 'image-master')

    @configuration
    def test_bad_signature(self):
        # Both files are downloaded, but the signature does not match the
        # image-master key.
        setup_keyrings()
        # Use the spare key as the blacklist, signed by itself.  Since this
        # won't match the image-signing key, the check will fail.
        server_path = os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz')
        setup_keyring_txz('spare.gpg', 'spare.gpg', dict(type='blacklist'),
                          server_path)
        with self.assertRaises(SignatureError) as cm:
            get_keyring('blacklist', 'gpg/blacklist.tar.xz', 'image-master')
        error = cm.exception
        # The local file name will be keyring.tar.xz in the cache directory.
        basename = os.path.basename
        self.assertEqual(basename(error.data_path), 'keyring.tar.xz')
        self.assertEqual(basename(error.signature_path), 'keyring.tar.xz.asc')
        # The crafted blacklist.tar.xz file will have an unpredictable
        # checksum due to tarfile variability.
        with open(server_path, 'rb') as fp:
            checksum = hashlib.md5(fp.read()).hexdigest()
        self.assertEqual(error.data_checksum, checksum)
        # The signature file's checksum is also unpredictable.
        with open(server_path + '.asc', 'rb') as fp:
            checksum = hashlib.md5(fp.read()).hexdigest()
        self.assertEqual(error.signature_checksum, checksum)

    @configuration
    def test_blacklisted_signature(self):
        # Normally, the signature would be good, except that the fingerprint
        # of the device signing key is blacklisted.
        setup_keyrings('archive-master', 'image-master')
        blacklist = os.path.join(config.tempdir, 'gpg', 'blacklist.tar.xz')
        # Blacklist the image-master keyring.
        setup_keyring_txz('image-master.gpg', 'image-master.gpg',
                          dict(type='blacklist'), blacklist)
        setup_keyring_txz(
            'image-signing.gpg', 'image-master.gpg',
            dict(type='image-signing'),
            os.path.join(self._serverdir, 'gpg', 'image-signing.tar.xz'))
        # Now put an image-signing key on the server and attempt to download
        # it.  Because the image-master is blacklisted, this will fail.
        self.assertRaises(SignatureError, get_keyring, 'image-signing',
                          'gpg/image-signing.tar.xz', 'image-master',
                          blacklist)

    @configuration
    def test_bad_json_type(self):
        # This type, while the signatures match, the keyring type in the
        # keyring.json file does not match.
        setup_keyrings()
        setup_keyring_txz(
            'device-signing.gpg', 'image-master.gpg', dict(type='master'),
            os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz'))
        with self.assertRaises(KeyringError) as cm:
            get_keyring('blacklist', 'gpg/blacklist.tar.xz', 'image-master')
        self.assertEqual(
            cm.exception.message,
            'keyring type mismatch; wanted: blacklist, got: master')

    @configuration
    def test_bad_json_model(self):
        # Similar to above, but with a non-matching model name.
        setup_keyrings()
        setup_keyring_txz(
            'device-signing.gpg', 'image-master.gpg',
            dict(type='blacklist', model='nexus0'),
            os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz'))
        with self.assertRaises(KeyringError) as cm:
            get_keyring('blacklist', 'gpg/blacklist.tar.xz', 'image-master')
        self.assertEqual(
            cm.exception.message,
            'keyring model mismatch; wanted: nexus7, got: nexus0')

    @configuration
    def test_expired(self):
        # Similar to above, but the expiry key in the json names a utc
        # timestamp that has already elapsed.
        last_year = datetime.now(tz=timezone.utc) + timedelta(days=-365)
        setup_keyrings()
        setup_keyring_txz(
            'device-signing.gpg', 'image-master.gpg',
            dict(type='blacklist',
                 model='nexus7',
                 expiry=last_year.timestamp()),
            os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz'))
        with self.assertRaises(KeyringError) as cm:
            get_keyring('blacklist', 'gpg/blacklist.tar.xz', 'image-master')
        self.assertEqual(cm.exception.message, 'expired keyring timestamp')

    @configuration
    def test_destination_image_master(self):
        # When a keyring is downloaded, we preserve its .tar.xz and
        # .tar.xz.asc files.
        setup_keyrings('archive-master')
        setup_keyring_txz(
            'image-master.gpg', 'archive-master.gpg',
            dict(type='image-master'),
            os.path.join(self._serverdir, 'gpg', 'image-master.tar.xz'))
        asc_path = config.gpg.image_master + '.asc'
        self.assertFalse(os.path.exists(config.gpg.image_master))
        self.assertFalse(os.path.exists(asc_path))
        get_keyring('image-master', 'gpg/image-master.tar.xz',
                    'archive-master')
        self.assertTrue(os.path.exists(config.gpg.image_master))
        self.assertTrue(os.path.exists(asc_path))
        with Context(config.gpg.archive_master) as ctx:
            self.assertTrue(ctx.verify(asc_path, config.gpg.image_master))

    @configuration
    def test_destination_image_signing(self):
        # When a keyring is downloaded, we preserve its .tar.xz and
        # .tar.xz.asc files.
        setup_keyrings('archive-master', 'image-master')
        setup_keyring_txz(
            'image-signing.gpg', 'image-master.gpg',
            dict(type='image-signing'),
            os.path.join(self._serverdir, 'gpg', 'image-signing.tar.xz'))
        asc_path = config.gpg.image_signing + '.asc'
        self.assertFalse(os.path.exists(config.gpg.image_signing))
        self.assertFalse(os.path.exists(asc_path))
        get_keyring('image-signing', 'gpg/image-signing.tar.xz',
                    'image-master')
        self.assertTrue(os.path.exists(config.gpg.image_signing))
        self.assertTrue(os.path.exists(asc_path))
        with Context(config.gpg.image_master) as ctx:
            self.assertTrue(ctx.verify(asc_path, config.gpg.image_signing))

    @configuration
    def test_destination_device_signing(self):
        # When a keyring is downloaded, we preserve its .tar.xz and
        # .tar.xz.asc files.
        setup_keyrings('archive-master', 'image-master', 'image-signing')
        setup_keyring_txz(
            'device-signing.gpg', 'image-signing.gpg',
            dict(type='device-signing'),
            os.path.join(self._serverdir, 'stable', 'nexus7',
                         'device-signing.tar.xz'))
        asc_path = config.gpg.device_signing + '.asc'
        self.assertFalse(os.path.exists(config.gpg.device_signing))
        self.assertFalse(os.path.exists(asc_path))
        get_keyring('device-signing', 'stable/nexus7/device-signing.tar.xz',
                    'image-signing')
        self.assertTrue(os.path.exists(config.gpg.device_signing))
        self.assertTrue(os.path.exists(asc_path))
        with Context(config.gpg.image_signing) as ctx:
            self.assertTrue(ctx.verify(asc_path, config.gpg.device_signing))

    @configuration
    def test_destination_blacklist(self):
        # Like above, but the blacklist files end up in the temporary
        # directory, since it's never persistent.
        setup_keyrings('archive-master', 'image-master')
        setup_keyring_txz(
            'spare.gpg', 'image-master.gpg', dict(type='blacklist'),
            os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz'))
        txz_path = os.path.join(config.updater.data_partition,
                                'blacklist.tar.xz')
        asc_path = txz_path + '.asc'
        self.assertFalse(os.path.exists(txz_path))
        self.assertFalse(os.path.exists(asc_path))
        get_keyring('blacklist', 'gpg/blacklist.tar.xz', 'image-master')
        self.assertTrue(os.path.exists(txz_path))
        self.assertTrue(os.path.exists(asc_path))
        with Context(config.gpg.image_master) as ctx:
            self.assertTrue(ctx.verify(asc_path, txz_path))
예제 #8
0
class TestLoadChannel(unittest.TestCase):
    """Test downloading and caching the channels.json file."""
    @classmethod
    def setUpClass(cls):
        SystemImagePlugin.controller.set_mode(cert_pem='cert.pem')

    def setUp(self):
        # Serve a known channels.json over HTTPS out of a temporary
        # directory; unwind partially acquired resources on failure.
        self._stack = ExitStack()
        self._state = State()
        try:
            self._serverdir = self._stack.enter_context(temporary_directory())
            self._stack.push(
                make_http_server(self._serverdir, 8943, 'cert.pem', 'key.pem'))
            copy('channel.channels_01.json', self._serverdir, 'channels.json')
            self._channels_path = os.path.join(self._serverdir,
                                               'channels.json')
        except:
            self._stack.close()
            raise

    def tearDown(self):
        self._stack.close()

    @configuration
    def test_load_channel_good_path(self):
        # A channels.json file signed by the image signing key, no blacklist.
        sign(self._channels_path, 'image-signing.gpg')
        setup_keyrings()
        self._state.run_thru('get_channel')
        channels = self._state.channels
        self.assertEqual(channels.daily.devices.nexus7.keyring.signature,
                         '/daily/nexus7/device-keyring.tar.xz.asc')

    @configuration
    def test_load_channel_bad_signature(self):
        # We get an error if the signature on the channels.json file is bad.
        sign(self._channels_path, 'spare.gpg')
        setup_keyrings()
        self._state.run_thru('get_channel')
        # At this point, the state machine has determined that the
        # channels.json file is not signed with the cached image signing key,
        # so it will try to download a new image signing key.  Let's put one
        # on the server, but it will not match the key that channels.json is
        # signed with.
        key_path = os.path.join(self._serverdir, 'gpg', 'image-signing.tar.xz')
        setup_keyring_txz('image-signing.gpg', 'image-master.gpg',
                          dict(type='image-signing'), key_path)
        # This will succeed by grabbing a new image-signing key.
        from systemimage.testing.controller import stop_downloader
        stop_downloader(SystemImagePlugin.controller)
        next(self._state)
        # With the next state transition, we'll go back to trying to get the
        # channel.json file.  Since the signature will still be bad, we'll get
        # a SignatureError this time.
        self.assertRaises(SignatureError, next, self._state)

    @configuration
    def test_load_channel_bad_signature_gets_fixed(self, config_d):
        # Like above, but the second download of the image signing key results
        # in a properly signed channels.json file.
        sign(self._channels_path, 'spare.gpg')
        setup_keyrings()
        self._state.run_thru('get_channel')
        # At this point, the state machine has determined that the
        # channels.json file is not signed with the cached image signing key,
        # so it will try to download a new image signing key.  Let's put one
        # on the server, but it will not match the key that channels.json is
        # signed with.
        self.assertIsNone(self._state.channels)
        setup_keyring_txz(
            'spare.gpg', 'image-master.gpg', dict(type='image-signing'),
            os.path.join(self._serverdir, 'gpg', 'image-signing.tar.xz'))
        # This will succeed by grabbing a new image-signing key.
        config = Configuration(config_d)
        with open(config.gpg.image_signing, 'rb') as fp:
            checksum = hashlib.md5(fp.read()).digest()
        next(self._state)
        with open(config.gpg.image_signing, 'rb') as fp:
            self.assertNotEqual(checksum, hashlib.md5(fp.read()).digest())
        # The next state transition will find that the channels.json file is
        # properly signed.
        next(self._state)
        self.assertIsNotNone(self._state.channels)
        self.assertEqual(
            self._state.channels.daily.devices.nexus7.keyring.signature,
            '/daily/nexus7/device-keyring.tar.xz.asc')

    @configuration
    def test_load_channel_blacklisted_signature(self, config_d):
        # We get an error if the signature on the channels.json file is good
        # but the key is blacklisted.
        sign(self._channels_path, 'image-signing.gpg')
        setup_keyrings()
        setup_keyring_txz(
            'image-signing.gpg', 'image-master.gpg', dict(type='blacklist'),
            os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz'))
        self._state.run_thru('get_channel')
        # We now have an image-signing key which is blacklisted.  This will
        # cause the state machine to try to download a new image signing key,
        # so let's put the cached one up on the server.  This will still be
        # blacklisted though.
        config = Configuration(config_d)
        key_path = os.path.join(self._serverdir, 'gpg', 'image-signing.tar.xz')
        shutil.copy(config.gpg.image_signing, key_path)
        shutil.copy(config.gpg.image_signing + '.asc', key_path + '.asc')
        # Run the state machine through _get_channel() again, only this time
        # because the key is still blacklisted, we'll get an exception.
        self.assertRaises(SignatureError, self._state.run_thru, 'get_channel')
예제 #9
0
class TestDownloadIndex(unittest.TestCase):
    """Test downloading and verifying the per-channel index.json file."""

    maxDiff = None

    @classmethod
    def setUpClass(cls):
        # Use `cls` — this is a classmethod (consistent with the other
        # test classes in this file).
        SystemImagePlugin.controller.set_mode(cert_pem='cert.pem')

    def setUp(self):
        # Start the HTTPS server running.  Vend it out of a temporary
        # directory which we load up with the right files.
        self._stack = ExitStack()
        try:
            self._serverdir = self._stack.enter_context(temporary_directory())
            self._stack.push(
                make_http_server(self._serverdir, 8943, 'cert.pem', 'key.pem'))
        except:
            self._stack.close()
            raise

    def tearDown(self):
        self._stack.close()

    def _copysign(self, src, dst, keyring):
        # Copy `src` into the server tree as `dst` and detached-sign it
        # with the given keyring.
        server_dst = os.path.join(self._serverdir, dst)
        makedirs(os.path.dirname(server_dst))
        copy(src, self._serverdir, dst)
        sign(server_dst, keyring)

    @configuration
    def test_load_index_good_path(self):
        # Load the index.json pointed to by the channels.json.  All signatures
        # validate correctly and there is no device keyring or blacklist.
        self._copysign('index.channels_05.json', 'channels.json',
                       'image-signing.gpg')
        # index.index_04.json path B will win, with no bootme flags.
        self._copysign('index.index_04.json', 'stable/nexus7/index.json',
                       'image-signing.gpg')
        setup_keyrings()
        state = State()
        state.run_thru('get_index')
        self.assertEqual(
            state.index.global_.generated_at,
            datetime(2013, 4, 29, 18, 45, 27, tzinfo=timezone.utc))
        self.assertEqual(state.index.images[0].files[1].checksum, 'bcd')

    @configuration
    def test_load_index_with_device_keyring(self):
        # Here, the index.json file is signed with a device keyring.
        self._copysign('index.channels_02.json', 'channels.json',
                       'image-signing.gpg')
        # index.index_04.json path B will win, with no bootme flags.
        self._copysign('index.index_04.json', 'stable/nexus7/index.json',
                       'device-signing.gpg')
        setup_keyrings()
        setup_keyring_txz(
            'device-signing.gpg', 'image-signing.gpg',
            dict(type='device-signing'),
            os.path.join(self._serverdir, 'stable', 'nexus7', 'device.tar.xz'))
        state = State()
        state.run_thru('get_index')
        self.assertEqual(
            state.index.global_.generated_at,
            datetime(2013, 4, 29, 18, 45, 27, tzinfo=timezone.utc))
        self.assertEqual(state.index.images[0].files[1].checksum, 'bcd')

    @configuration
    def test_load_index_with_device_keyring_and_signing_key(self):
        # Here, the index.json file is signed with the image signing keyring,
        # even though there is a device key.  That's fine.
        self._copysign('index.channels_02.json', 'channels.json',
                       'image-signing.gpg')
        # index.index_04.json path B will win, with no bootme flags.
        self._copysign('index.index_04.json', 'stable/nexus7/index.json',
                       'image-signing.gpg')
        setup_keyrings()
        setup_keyring_txz(
            'device-signing.gpg', 'image-signing.gpg',
            dict(type='device-signing'),
            os.path.join(self._serverdir, 'stable', 'nexus7', 'device.tar.xz'))
        state = State()
        state.run_thru('get_index')
        self.assertEqual(
            state.index.global_.generated_at,
            datetime(2013, 4, 29, 18, 45, 27, tzinfo=timezone.utc))
        self.assertEqual(state.index.images[0].files[1].checksum, 'bcd')

    @configuration
    def test_load_index_with_bad_keyring(self):
        # Here, the index.json file is signed with a defective device keyring.
        self._copysign('index.channels_02.json', 'channels.json',
                       'image-signing.gpg')
        # This will be signed by a keyring that is not the device keyring.
        self._copysign('index.index_04.json', 'stable/nexus7/index.json',
                       'spare.gpg')
        setup_keyrings()
        setup_keyring_txz(
            'device-signing.gpg', 'image-signing.gpg',
            dict(type='device-signing'),
            os.path.join(self._serverdir, 'stable', 'nexus7', 'device.tar.xz'))
        state = State()
        state.run_until('get_index')
        self.assertRaises(SignatureError, next, state)

    @configuration
    def test_load_index_with_blacklist(self):
        # Here, we've blacklisted the device key.
        self._copysign('index.channels_02.json', 'channels.json',
                       'image-signing.gpg')
        # This will be signed by a keyring that is not the device keyring.
        self._copysign('index.index_04.json', 'stable/nexus7/index.json',
                       'device-signing.gpg')
        setup_keyrings()
        setup_keyring_txz(
            'device-signing.gpg', 'image-signing.gpg',
            dict(type='device-signing'),
            os.path.join(self._serverdir, 'stable', 'nexus7', 'device.tar.xz'))
        setup_keyring_txz(
            'device-signing.gpg', 'image-master.gpg', dict(type='blacklist'),
            os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz'))
        state = State()
        state.run_until('get_index')
        self.assertRaises(SignatureError, next, state)

    @configuration
    def test_missing_channel(self):
        # The system's channel does not exist.
        self._copysign('index.channels_03.json', 'channels.json',
                       'image-signing.gpg')
        # index.index_04.json path B will win, with no bootme flags.
        self._copysign('index.index_04.json', 'stable/nexus7/index.json',
                       'image-signing.gpg')
        setup_keyrings()
        # Our channel (stable) isn't in the channels.json file, so there's
        # nothing to do.  Running the state machine to its conclusion leaves
        # us with no index file.
        state = State()
        list(state)
        # There really is nothing left to do.
        self.assertIsNone(state.index)

    @configuration
    def test_missing_device(self):
        # The system's device does not exist.
        self._copysign('index.channels_04.json', 'channels.json',
                       'image-signing.gpg')
        # index.index_04.json path B will win, with no bootme flags.
        self._copysign('index.index_04.json', 'stable/nexus7/index.json',
                       'image-signing.gpg')
        setup_keyrings()
        # Our device (nexus7) isn't in the channels.json file, so there's
        # nothing to do.  Running the state machine to its conclusion leaves
        # us with no index file.
        state = State()
        list(state)
        # There really is nothing left to do.
        self.assertIsNone(state.index)
예제 #10
0
class ServerTestBase(unittest.TestCase):
    """Base class for tests that need signed server-vended data files.

    Subclasses must override INDEX_FILE, CHANNEL_FILE, CHANNEL, and DEVICE
    (setUp() asserts that they did).  setUp() starts an HTTPS server on
    port 8943 for metadata and an HTTP server on port 8980 for data files,
    both vending out of a temporary directory populated with signed copies
    of the subclass's channel and index files.
    """

    # Must override in base classes.
    INDEX_FILE = None
    CHANNEL_FILE = None
    CHANNEL = None
    DEVICE = None
    SIGNING_KEY = 'device-signing.gpg'

    # For more detailed output.
    maxDiff = None

    @classmethod
    def setUpClass(cls):
        # unittest passes the class itself to this classmethod, so the
        # parameter is named `cls` (it was previously, misleadingly, `self`).
        # Avoid circular imports.
        from systemimage.testing.nose import SystemImagePlugin
        SystemImagePlugin.controller.set_mode(cert_pem='cert.pem')

    def setUp(self):
        # Avoid circular imports.
        from systemimage.state import State
        self._resources = ExitStack()
        self._state = State()
        try:
            self._serverdir = self._resources.enter_context(
                temporary_directory())
            # Start up both an HTTPS and HTTP server.  The data files are
            # vended over the latter, everything else, over the former.
            self._resources.push(
                make_http_server(self._serverdir, 8943, 'cert.pem', 'key.pem'))
            self._resources.push(make_http_server(self._serverdir, 8980))
            # Set up the server files.
            assert self.CHANNEL_FILE is not None, (
                'Subclasses must set CHANNEL_FILE')
            copy(self.CHANNEL_FILE, self._serverdir, 'channels.json')
            sign(os.path.join(self._serverdir, 'channels.json'),
                 'image-signing.gpg')
            assert self.CHANNEL is not None, 'Subclasses must set CHANNEL'
            assert self.DEVICE is not None, 'Subclasses must set DEVICE'
            index_path = os.path.join(self._serverdir, self.CHANNEL,
                                      self.DEVICE, 'index.json')
            head, tail = os.path.split(index_path)
            assert self.INDEX_FILE is not None, (
                'Subclasses must set INDEX_FILE')
            copy(self.INDEX_FILE, head, tail)
            sign(index_path, self.SIGNING_KEY)
            setup_index(self.INDEX_FILE, self._serverdir, self.SIGNING_KEY)
        except:
            # If anything above failed, cleanups registered via addCleanup
            # haven't been recorded yet, so close the acquired resources
            # before re-raising.
            self._resources.close()
            raise
        self.addCleanup(self._resources.close)

    def _setup_server_keyrings(self, *, device_signing=True):
        # Only the archive-master key is pre-loaded.  All the other keys
        # are downloaded and there will be both a blacklist and device
        # keyring.  The four signed keyring tar.xz files and their
        # signatures end up in the proper location after the state machine
        # runs to completion.
        setup_keyrings('archive-master')
        setup_keyring_txz(
            'spare.gpg', 'image-master.gpg', dict(type='blacklist'),
            os.path.join(self._serverdir, 'gpg', 'blacklist.tar.xz'))
        setup_keyring_txz(
            'image-master.gpg', 'archive-master.gpg',
            dict(type='image-master'),
            os.path.join(self._serverdir, 'gpg', 'image-master.tar.xz'))
        setup_keyring_txz(
            'image-signing.gpg', 'image-master.gpg',
            dict(type='image-signing'),
            os.path.join(self._serverdir, 'gpg', 'image-signing.tar.xz'))
        if device_signing:
            setup_keyring_txz(
                'device-signing.gpg', 'image-signing.gpg',
                dict(type='device-signing'),
                os.path.join(self._serverdir, self.CHANNEL, self.DEVICE,
                             'device-signing.tar.xz'))
예제 #11
0
class TestDownload(unittest.TestCase):
    """Base class for testing the PyCURL and udm downloaders."""

    def setUp(self):
        super().setUp()
        self._resources = ExitStack()
        try:
            # Start the HTTP server running, vending files out of our test
            # data directory.
            directory = os.path.dirname(data_path('__init__.py'))
            self._resources.push(make_http_server(directory, 8980))
        except:
            # tearDown() won't run if setUp() fails, so close whatever
            # resources were already acquired before propagating the error.
            self._resources.close()
            raise

    def tearDown(self):
        # Shut down the HTTP server started in setUp().
        self._resources.close()
        super().tearDown()

    def _downloader(self, *args):
        # Return the configured download manager (PyCURL- or udm-based),
        # forwarding any positional arguments (e.g. a progress callback).
        return get_download_manager(*args)

    @configuration
    def test_good_path(self):
        # Download a bunch of files that exist.  No callback.
        self._downloader().get_files(_http_pathify([
            ('channel.channels_05.json', 'channels.json'),
            ('download.index_01.json', 'index.json'),
            ]))
        self.assertEqual(
            set(os.listdir(config.tempdir)),
            set(['channels.json', 'index.json']))

    @configuration
    def test_empty_download(self):
        # Empty download set completes successfully.  LP: #1245597.
        self._downloader().get_files([])
        # No TimeoutError is raised.

    @configuration
    def test_user_agent(self):
        # The User-Agent request header contains the build number.
        version = random.randint(0, 99)
        config.build_number = version
        config.device = 'geddyboard'
        config.channel = 'devel-trio'
        # Download a magic path which the server will interpret to return us
        # the User-Agent header value.
        self._downloader().get_files(_http_pathify([
            ('user-agent.txt', 'user-agent.txt'),
            ]))
        path = os.path.join(config.tempdir, 'user-agent.txt')
        with open(path, 'r', encoding='utf-8') as fp:
            user_agent = fp.read()
        self.assertEqual(
            user_agent,
            'Ubuntu System Image Upgrade Client: '
            'device=geddyboard;channel=devel-trio;build={}'.format(
                version))

    @configuration
    def test_download_with_callback(self):
        # Downloading calls the callback with some arguments.
        received_bytes = 0
        total_bytes = 0
        def callback(received, total):
            nonlocal received_bytes, total_bytes
            received_bytes = received
            total_bytes = total
        downloader = self._downloader(callback)
        downloader.get_files(_http_pathify([
            ('channel.channels_05.json', 'channels.json'),
            ('download.index_01.json', 'index.json'),
            ]))
        self.assertEqual(
            set(os.listdir(config.tempdir)),
            set(['channels.json', 'index.json']))
        # 669 is presumably the combined byte size of the two test data
        # files -- re-verify against the data directory if these ever change.
        self.assertEqual(received_bytes, 669)
        self.assertEqual(total_bytes, 669)

    @configuration
    def test_download_with_broken_callback(self):
        # If the callback raises an exception, it is logged and ignored.
        def callback(receive, total):
            raise RuntimeError
        exception = None
        def capture(message):
            nonlocal exception
            exception = message
        downloader = self._downloader(callback)
        with patch('systemimage.download.log.exception', capture):
            downloader.get_files(_http_pathify([
                ('channel.channels_05.json', 'channels.json'),
                ]))
        # The exception got logged.
        self.assertEqual(exception, 'Exception in progress callback')
        # The file still got downloaded.
        self.assertEqual(os.listdir(config.tempdir), ['channels.json'])

    # This test helps bump the udm-based downloader test coverage to 100%.
    @unittest.skipIf(USING_PYCURL, 'Test is not relevant for PyCURL')
    @configuration
    def test_timeout(self):
        # If the reactor times out, we get an exception.  We fake the timeout
        # by setting the attribute on the reactor, even though it successfully
        # completes its download without timing out.
        def finish_with_timeout(self, *args, **kws):
            self.timed_out = True
            self.quit()
        with patch('systemimage.udm.DownloadReactor._do_finished',
                   finish_with_timeout):
            self.assertRaises(
                TimeoutError,
                self._downloader().get_files,
                _http_pathify([('channel.channels_05.json', 'channels.json')])
                )
예제 #12
0
class TestCURL(unittest.TestCase):
    """Exercise code paths specific to the PyCURL download manager."""

    def setUp(self):
        super().setUp()
        self._resources = ExitStack()
        try:
            # Start the HTTP server running, vending files out of our test
            # data directory.
            directory = os.path.dirname(data_path('__init__.py'))
            self._resources.push(make_http_server(directory, 8980))
        except:
            # tearDown() won't run if setUp() fails, so clean up here.
            self._resources.close()
            raise

    def tearDown(self):
        self._resources.close()
        super().tearDown()

    @configuration
    def test_multi_perform(self):
        # PyCURL's multi.perform() can return the E_CALL_MULTI_PERFORM status
        # which tells us to just try again.  This doesn't happen in practice,
        # but the code path needs coverage.  However, .perform() itself can't
        # be mocked because pycurl.CurlMulti is a built-in.  Fun.
        class FakeMulti:
            def perform(self):
                return pycurl.E_CALL_MULTI_PERFORM, 2
        done_once = False
        class Testable(CurlDownloadManager):
            def _do_once(self, multi, handles):
                nonlocal done_once
                if done_once:
                    return super()._do_once(multi, handles)
                else:
                    done_once = True
                    return super()._do_once(FakeMulti(), handles)
        Testable().get_files(_http_pathify([
            ('channel.channels_05.json', 'channels.json'),
            ('download.index_01.json', 'index.json'),
            ]))
        self.assertTrue(done_once)
        # The files still get downloaded.
        self.assertEqual(
            set(os.listdir(config.tempdir)),
            set(['channels.json', 'index.json']))

    @configuration
    def test_multi_fail(self):
        # PyCURL's multi.perform() can return a failure code (i.e. not E_OK)
        # which triggers a FileNotFoundError.  It doesn't really matter which
        # failure code it returns.
        class FakeMulti:
            def perform(self):
                return pycurl.E_READ_ERROR, 2
        class Testable(CurlDownloadManager):
            def _do_once(self, multi, handles):
                return super()._do_once(FakeMulti(), handles)
        with self.assertRaises(FileNotFoundError) as cm:
            Testable().get_files(_http_pathify([
                ('channel.channels_05.json', 'channels.json'),
                ('download.index_01.json', 'index.json'),
                ]))
        # One of the two files will be contained in the error message, but
        # which one is undefined, although in practice it will be the first
        # one.  The old pattern's second alternative ('index_01') could never
        # match the actual source name 'download.index_01.json', and its dots
        # were unescaped; this raw, escaped pattern matches either URL.
        self.assertRegex(
            cm.exception.args[0],
            r'http://localhost:8980/(channel\.channels_05|download\.index_01)'
            r'\.json')
예제 #13
0
class TestDuplicateDownloads(unittest.TestCase):
    """Rules for multiple download records sharing one destination.

    Duplicates are allowed only when the source URLs and checksums also
    match; otherwise get_files() raises a DuplicateDestinationError and
    nothing is downloaded.
    """

    maxDiff = None

    def setUp(self):
        super().setUp()
        self._resources = ExitStack()
        try:
            self._serverdir = self._resources.enter_context(
                temporary_directory())
            self._resources.push(make_http_server(self._serverdir, 8980))
        except:
            # tearDown() won't run if setUp() fails, so clean up here.
            self._resources.close()
            raise

    def tearDown(self):
        self._resources.close()
        super().tearDown()

    @configuration
    def test_matched_duplicates(self):
        # A download that duplicates the destination location, but for which
        # the sources and checksums are the same is okay.
        content = b'x' * 100
        checksum = sha256(content).hexdigest()
        with open(os.path.join(self._serverdir, 'source.dat'), 'wb') as fp:
            fp.write(content)
        downloader = get_download_manager()
        downloads = []
        for url, dst in _http_pathify([('source.dat', 'local.dat'),
                                       ('source.dat', 'local.dat'),
                                       ]):
            downloads.append(Record(url, dst, checksum))
        downloader.get_files(downloads)
        # Only one copy of the file ends up in the destination.
        self.assertEqual(os.listdir(config.tempdir), ['local.dat'])

    @configuration
    def test_mismatched_urls(self):
        # A download that duplicates the destination location, but for which
        # the source urls don't match, is not allowed.
        content = b'x' * 100
        checksum = sha256(content).hexdigest()
        with open(os.path.join(self._serverdir, 'source1.dat'), 'wb') as fp:
            fp.write(content)
        with open(os.path.join(self._serverdir, 'source2.dat'), 'wb') as fp:
            fp.write(content)
        downloader = get_download_manager()
        downloads = []
        for url, dst in _http_pathify([('source1.dat', 'local.dat'),
                                       ('source2.dat', 'local.dat'),
                                       ]):
            downloads.append(Record(url, dst, checksum))
        with self.assertRaises(DuplicateDestinationError) as cm:
            downloader.get_files(downloads)
        self.assertEqual(len(cm.exception.duplicates), 1)
        dst, dupes = cm.exception.duplicates[0]
        self.assertEqual(os.path.basename(dst), 'local.dat')
        self.assertEqual([r[0] for r in dupes],
                         ['http://localhost:8980/source1.dat',
                          'http://localhost:8980/source2.dat'])
        # Nothing got downloaded.
        self.assertEqual(os.listdir(config.tempdir), [])

    @configuration
    def test_mismatched_checksums(self):
        # A download that duplicates the destination location, but for which
        # the checksums don't match, is not allowed.
        content = b'x' * 100
        checksum = sha256(content).hexdigest()
        with open(os.path.join(self._serverdir, 'source.dat'), 'wb') as fp:
            fp.write(content)
        downloader = get_download_manager()
        url = urljoin(config.http_base, 'source.dat')
        downloads = [
            Record(url, 'local.dat', checksum),
            # Mutate the checksum so they won't match.  Rotating the last
            # hex digit to the front keeps a valid-looking digest while
            # guaranteeing a different value.
            Record(url, 'local.dat', checksum[-1] + checksum[:-1]),
            ]
        with self.assertRaises(DuplicateDestinationError) as cm:
            downloader.get_files(downloads)
        self.assertEqual(len(cm.exception.duplicates), 1)
        dst, dupes = cm.exception.duplicates[0]
        self.assertEqual(os.path.basename(dst), 'local.dat')
        self.assertEqual([r[0] for r in dupes],
                         ['http://localhost:8980/source.dat',
                          'http://localhost:8980/source.dat'])
        # The records in the exception aren't sorted by checksum.
        self.assertEqual(
            sorted(r[2] for r in dupes),
            ['09ecb6ebc8bcefc733f6f2ec44f791abeed6a99edf0cc31519637898aebd52d8'
             ,
             '809ecb6ebc8bcefc733f6f2ec44f791abeed6a99edf0cc31519637898aebd52d'
             ])
        self.assertEqual(os.listdir(config.tempdir), [])

    @configuration
    def test_duplicate_error_message(self):
        # When a duplicate destination error occurs, an error message gets
        # logged.  Make sure the error message is helpful.
        content = b'x' * 100
        checksum = sha256(content).hexdigest()
        with open(os.path.join(self._serverdir, 'source.dat'), 'wb') as fp:
            fp.write(content)
        downloader = get_download_manager()
        url = urljoin(config.http_base, 'source.dat')
        downloads = [
            Record(url, 'local.dat', checksum),
            # Mutate the checksum so they won't match.
            Record(url, 'local.dat', checksum[-1] + checksum[:-1]),
            ]
        with self.assertRaises(DuplicateDestinationError) as cm:
            downloader.get_files(downloads)
        # The exception's str() is a pprint-style rendering of the list of
        # (destination, [download records]) pairs.
        self.assertMultiLineEqual(str(cm.exception), """
[   (   'local.dat',
        [   (   'http://localhost:8980/source.dat',
                'local.dat',
                '09ecb6ebc8bcefc733f6f2ec44f791abeed6a99edf0cc31519637898aebd52d8'),
            (   'http://localhost:8980/source.dat',
                'local.dat',
                '809ecb6ebc8bcefc733f6f2ec44f791abeed6a99edf0cc31519637898aebd52d')])]""")
예제 #14
0
class TestGSMDownloads(unittest.TestCase):
    """Verify that the GSM-download flag tracks the auto_download setting."""

    def setUp(self):
        super().setUp()
        # Patch this method so that we can verify both the value of the flag
        # that system-image sets and the value that u-d-m's group downloader
        # records and uses.  This is the only thing we can really
        # automatically test given that e.g. we won't have GSM in development.
        self._gsm_set_flag = None
        self._gsm_get_flag = None
        self._original = None
        def set_gsm(iface, *, allow_gsm):
            # Record what system-image asked for, delegate to the real
            # implementation, then read back what u-d-m actually stored.
            self._gsm_set_flag = allow_gsm
            self._original(iface, allow_gsm=allow_gsm)
            self._gsm_get_flag = iface.isGSMDownloadAllowed()
        self._resources = ExitStack()
        try:
            # Start the HTTP server running, vending files out of our test
            # data directory.
            directory = os.path.dirname(data_path('__init__.py'))
            self._resources.push(make_http_server(directory, 8980))
            # Patch the GSM setting method to capture what actually happens.
            self._original = getattr(UDMDownloadManager, '_set_gsm')
            self._resources.enter_context(patch(
                'systemimage.udm.UDMDownloadManager._set_gsm', set_gsm))
            # Drop the saved reference to the unpatched method when the
            # stack closes, so it can't leak across tests.
            self._resources.callback(setattr, self, '_original', None)
        except:
            # tearDown() won't run if setUp() fails, so clean up here.
            self._resources.close()
            raise

    def tearDown(self):
        self._resources.close()
        super().tearDown()

    @configuration
    def test_manual_downloads_gsm_allowed(self, config_d):
        # When auto_download is 0, manual downloads are enabled so assuming
        # the user knows what they're doing, GSM downloads are allowed.
        config = Configuration(config_d)
        Settings(config).set('auto_download', '0')
        get_download_manager().get_files(_http_pathify([
            ('channel.channels_05.json', 'channels.json')
            ]))
        self.assertTrue(self._gsm_set_flag)
        self.assertTrue(self._gsm_get_flag)

    @configuration
    def test_wifi_downloads_gsm_disallowed(self, config_d):
        # Obviously GSM downloads are not allowed when downloading
        # automatically on wifi-only.
        config = Configuration(config_d)
        Settings(config).set('auto_download', '1')
        get_download_manager().get_files(_http_pathify([
            ('channel.channels_05.json', 'channels.json')
            ]))
        self.assertFalse(self._gsm_set_flag)
        self.assertFalse(self._gsm_get_flag)

    @configuration
    def test_always_downloads_gsm_allowed(self, config_d):
        # GSM downloads are allowed when always downloading.
        config = Configuration(config_d)
        Settings(config).set('auto_download', '2')
        get_download_manager().get_files(_http_pathify([
            ('channel.channels_05.json', 'channels.json')
            ]))
        self.assertTrue(self._gsm_set_flag)
        self.assertTrue(self._gsm_get_flag)
예제 #15
0
파일: backup.py 프로젝트: brownan/backathon
def backup(repo, progress=None, single=False):
    """Perform a backup

    This is usually called from Repository.backup() and is tightly integrated
    with the Repository class. It lives in its own module for organizational
    reasons.

    :type repo: backathon.repository.Repository
    :param progress: A callback function that provides status updates on the
        scan
    :param single: If this parameter is true, the backup process will all
        happen in a single thread. This can help with debugging and profiling.
    :raises RuntimeError: if any FSEntry is still marked ``new=True``,
        i.e. a scan has not been run yet.

    The progress callable takes two parameters: the backup count and backup
    total.
    """
    if models.FSEntry.objects.using(repo.db).filter(new=True).exists():
        # This happens when a new root is added but hasn't been scanned yet.
        raise RuntimeError("You need to run a scan first")

    to_backup = models.FSEntry.objects.using(repo.db).filter(obj__isnull=True)

    # The ready_to_backup set is the set of all nodes whose children have all
    # already been backed up. In other words, these are the entries that we
    # can back up right now.
    ready_to_backup = to_backup.exclude(
        # The sub query selects the *parents* of entries that are not yet
        # backed up. Therefore, we're excluding entries whose children are
        # not yet backed up.
        id__in=to_backup.exclude(parent__isnull=True).values("parent_id"))

    # The two above querysets remain unevaluated. We therefore get new results
    # on each call to .exists() below. Calls to .iterator() always return new
    # results.

    backup_total = to_backup.count()
    backup_count = 0

    if single:
        executor = DummyExecutor()
    else:
        executor = concurrent.futures.ProcessPoolExecutor(
            max_workers=NUM_WORKERS, )
        # SQLite connections should not be forked, according to the SQLite
        # documentation. Django and/or Python may have some protections
        # from this problem, but I'm not aware of any, so I'm taking caution and
        # closing all connections before forcing the process pool to immediately
        # launch the processes by submitting a dummy task.
        connections.close_all()
        executor.submit(time.time).result()

    # Futures submitted to the executor whose results have not yet been
    # collected.
    tasks = set()

    contexts = ExitStack()
    with contexts:
        contexts.enter_context(executor)

        # Cancel all tasks that haven't been started yet
        def on_exit():
            for t in tasks:
                t.cancel()

        contexts.callback(on_exit)

        def catch_sigint(exc_type, exc_value, traceback):
            # Registered via ExitStack.push(), so this is invoked like an
            # __exit__ method.  It returns None (falsey), so a
            # KeyboardInterrupt still propagates after the message prints.
            if exc_type and issubclass(exc_type, KeyboardInterrupt):
                print()
                print("Ctrl-C caught. Finishing the current batch of "
                      "uploads, please wait...")

        contexts.push(catch_sigint)

        while to_backup.exists():

            # Count of inner-loop iterations; checked by the progress
            # assertion after the for loop.
            ct = 0
            last_checkpoint = time.monotonic()

            iterator = ready_to_backup.iterator()
            for entry_batch in batcher(iterator, BATCH_SIZE):
                ct += 1

                # Assert our query is working correctly and that there are no
                # SQLite isolation problems (entries we've already backed up
                # re-appearing later in the same query)
                assert all(entry.obj_id is None for entry in entry_batch)

                tasks.add(executor.submit(backup_entry, repo, entry_batch))

                # Don't put the entire to_backup result set in the queue at
                # once, to save memory.
                # If there are too many unfinished tasks, wait for one to
                # finish.
                if len(tasks) >= NUM_WORKERS + 1 or single:
                    done, tasks = concurrent.futures.wait(
                        tasks,
                        timeout=None,
                        return_when=concurrent.futures.FIRST_COMPLETED,
                    )

                    # Each future's result feeds the progress count
                    # (presumably the number of entries backed up in that
                    # batch -- see backup_entry).
                    for f in done:
                        backup_count += f.result()
                        if progress is not None:
                            progress(backup_count, backup_total)

                # SQLite won't auto-checkpoint the write-ahead log while we
                # have the query iterator still open. So we force the inner
                # loop to exit every once in a while and force a WAL
                # checkpoint to keep the WAL from growing unbounded.
                if time.monotonic() - last_checkpoint > 30:
                    # Note: closing the iterator should close the cursor
                    # within it, but I think this is relying on reference
                    # counted garbage collection.
                    # If we run into problems, we'll have to find a different
                    # strategy to run checkpoints
                    # Closing the iterator presumably ends this for loop
                    # early; the outer while loop then re-runs the query.
                    iterator.close()
                    with connections[repo.db].cursor() as cursor:
                        cursor.execute("PRAGMA wal_checkpoint=RESTART")

            # Sanity check: if we entered the outer loop but the inner loop's
            # query didn't select anything, then we're not making progress and
            # may be caught in an infinite loop. In particular, this could happen
            # if we somehow got a cycle in the FSEntry tree in the database.
            # There would be entries needing backing up, but none of them have
            # all their dependent children backed up.
            assert ct > 0

            # Collect results for the rest of the tasks. We have to do this
            # at the end of each inner loop to guarantee a correct ordering
            # to backed up entries. Items selected next loop could depend on
            # items still in process in the pool.
            # This stalls the workers but it doesn't end up costing all that
            # much time compared to time spent working.
            for f in concurrent.futures.as_completed(tasks):
                backup_count += f.result()
                if progress is not None:
                    progress(backup_count, backup_total)
            tasks.clear()

    # End of outer "while" loop, and end of the contexts ExitStack. The
    # Executor is shut down at this point.

    # Now add the Snapshot object(s) to the database representing this backup
    # run. There's one snapshot per root, but we give them all the same datetime
    # so they can still be grouped together in queries.
    now = timezone.now()
    for root in models.FSEntry.objects.using(
            repo.db).filter(parent__isnull=True):
        assert root.obj_id is not None
        with atomic_immediate(using=repo.db):
            ss = models.Snapshot.objects.using(repo.db).create(
                path=root.path,
                root_id=root.obj_id,
                date=now,
            )
            repo.put_snapshot(ss)

    with connections[repo.db].cursor() as cursor:
        cursor.execute("ANALYZE")
예제 #16
0
class TestWinnerDownloads(unittest.TestCase):
    """Test full end-to-end downloads through index.json."""
    @classmethod
    def setUpClass(cls):
        """Configure the system-image test controller once for this class.

        Fix: this is a ``@classmethod``, so its first parameter is the class
        object and must be named ``cls``, not ``self``.
        """
        SystemImagePlugin.controller.set_mode(cert_pem='cert.pem')

    def setUp(self):
        """Start an HTTPS server (metadata) and an HTTP server (zip files).

        Both servers vend files out of a temporary directory which is loaded
        with signed channels/index metadata and every file on upgrade path B.
        Resources are registered on an ExitStack; if any set-up step fails,
        everything created so far is unwound before the exception propagates.
        """
        # Build everything on a local stack, then transfer ownership to
        # self._stack only once every step has succeeded.  This is the
        # documented ExitStack.pop_all() pattern and replaces the previous
        # bare ``except:`` + manual close/re-raise.
        with ExitStack() as resources:
            self._serverdir = resources.enter_context(temporary_directory())
            copy('winner.channels_01.json', self._serverdir, 'channels.json')
            sign(os.path.join(self._serverdir, 'channels.json'),
                 'image-signing.gpg')
            # Path B will win, with no bootme flags.
            self._indexpath = os.path.join('stable', 'nexus7', 'index.json')
            copy('winner.index_02.json', self._serverdir, self._indexpath)
            sign(os.path.join(self._serverdir, self._indexpath),
                 'image-signing.gpg')
            # Create every file in path B.  The file contents will be the
            # checksum value.  We need to create the signatures on the fly.
            setup_index('winner.index_02.json', self._serverdir,
                        'image-signing.gpg')
            resources.push(
                make_http_server(self._serverdir, 8943, 'cert.pem', 'key.pem'))
            resources.push(make_http_server(self._serverdir, 8980))
            # Success -- keep all resources alive until tearDown().
            self._stack = resources.pop_all()

    def tearDown(self):
        """Shut down both test servers and remove the temporary directory."""
        self._stack.close()

    @configuration
    def test_calculate_candidates(self):
        """The index yields exactly three candidate upgrade paths.

        The descriptions-collection loop was triplicated verbatim; it is
        now factored into a local helper.
        """
        setup_keyrings()
        state = State()
        # Run the state machine until we get an index file.
        state.run_until('calculate_winner')
        candidates = get_candidates(state.index, 100)
        # There are three candidate upgrade paths.
        self.assertEqual(len(candidates), 3)

        def descriptions_of(path):
            # There's only one description per image so order doesn't matter.
            return [description
                    for image in path
                    for description in image.descriptions.values()]

        self.assertEqual(descriptions_of(candidates[0]),
                         ['Full A', 'Delta A.1', 'Delta A.2'])
        self.assertEqual(descriptions_of(candidates[1]),
                         ['Full B', 'Delta B.1', 'Delta B.2'])
        self.assertEqual(descriptions_of(candidates[2]),
                         ['Full C', 'Delta C.1'])

    @configuration
    def test_calculate_winner(self):
        """The state machine selects path B as the winning upgrade path."""
        setup_keyrings()
        state = State()
        touch_build(100)
        # Run the state machine long enough to get the candidates and winner.
        state.run_thru('calculate_winner')
        # There's only one description per image so order doesn't matter.
        descriptions = [description
                        for image in state.winner
                        for description in image.descriptions.values()]
        self.assertEqual(descriptions, ['Full B', 'Delta B.1', 'Delta B.2'])

    @configuration
    def test_download_winners(self):
        """Every file on the winning path gets downloaded."""
        setup_keyrings()
        state = State()
        touch_build(100)
        # Run the state machine until we download the files.
        state.run_thru('download_files')
        # Each path-B file contains its own checksum, so comparing contents
        # verifies the downloads: Full B, then Delta B.1, then Delta B.2.
        expected = {
            '5.txt': '345', '6.txt': '456', '7.txt': '567',
            '8.txt': '678', '9.txt': '789', 'a.txt': '89a',
            'b.txt': '9ab', 'd.txt': 'fed', 'c.txt': 'edc',
            }
        for filename, contents in expected.items():
            path = os.path.join(config.updater.cache_partition, filename)
            with open(path, encoding='utf-8') as fp:
                self.assertEqual(fp.read(), contents)

    @configuration
    def test_download_winners_overwrite(self):
        """Winning files are downloaded even over pre-existing stale files."""
        setup_keyrings()
        state = State()
        touch_build(100)
        # Pre-populate every destination file with stale content, so we can
        # verify that the download step overwrites whatever is already there.
        for basename in '56789abcd':
            stale = os.path.join(config.updater.cache_partition,
                                 basename + '.txt')
            with open(stale, 'w', encoding='utf-8') as fp:
                print('stale', file=fp)
        state.run_thru('download_files')
        # Each path-B file contains its own checksum: Full B, then Delta B.1,
        # then Delta B.2.
        expected = {
            '5.txt': '345', '6.txt': '456', '7.txt': '567',
            '8.txt': '678', '9.txt': '789', 'a.txt': '89a',
            'b.txt': '9ab', 'd.txt': 'fed', 'c.txt': 'edc',
            }
        for filename, contents in expected.items():
            path = os.path.join(config.updater.cache_partition, filename)
            with open(path, encoding='utf-8') as fp:
                self.assertEqual(fp.read(), contents)

    @configuration
    def test_download_winners_signed_by_device_key(self):
        """Downloads succeed when files are signed by the device key.

        All the winning path's files are downloaded even when they are
        signed by the device key instead of the image signing master.
        """
        setup_keyrings()
        # To set up the device signing key, we need to load channels_03.json
        # and copy the device keyring to the server.
        copy('winner.channels_02.json', self._serverdir, 'channels.json')
        sign(os.path.join(self._serverdir, 'channels.json'),
             'image-signing.gpg')
        setup_keyring_txz(
            'device-signing.gpg', 'image-signing.gpg',
            dict(type='device-signing'),
            os.path.join(self._serverdir, 'stable', 'nexus7', 'device.tar.xz'))
        # The index.json file and all the downloadable files must now be
        # signed with the device key.
        sign(os.path.join(self._serverdir, self._indexpath),
             'device-signing.gpg')
        setup_index('winner.index_02.json', self._serverdir,
                    'device-signing.gpg')
        touch_build(100)
        # Run the state machine until we download the files.
        state = State()
        state.run_thru('download_files')
        # Each path-B file contains its own checksum: Full B, then Delta B.1,
        # then Delta B.2.
        expected = {
            '5.txt': '345', '6.txt': '456', '7.txt': '567',
            '8.txt': '678', '9.txt': '789', 'a.txt': '89a',
            'b.txt': '9ab', 'd.txt': 'fed', 'c.txt': 'edc',
            }
        for filename, contents in expected.items():
            path = os.path.join(config.updater.cache_partition, filename)
            with open(path, encoding='utf-8') as fp:
                self.assertEqual(fp.read(), contents)

    @configuration
    def test_download_winners_signed_by_signing_key_with_device_key(self):
        """Image-signing-key files download even when a device key exists.

        The index is signed by the device key, while all the downloadable
        files stay signed by the image signing key.
        """
        setup_keyrings()
        # To set up the device signing key, we need to load this channels.json
        # file and copy the device keyring to the server.
        copy('winner.channels_02.json', self._serverdir, 'channels.json')
        sign(os.path.join(self._serverdir, 'channels.json'),
             'image-signing.gpg')
        setup_keyring_txz(
            'device-signing.gpg', 'image-signing.gpg',
            dict(type='device-signing'),
            os.path.join(self._serverdir, 'stable', 'nexus7', 'device.tar.xz'))
        sign(os.path.join(self._serverdir, self._indexpath),
             'device-signing.gpg')
        # All the downloadable files are now signed with the image signing key.
        setup_index('winner.index_02.json', self._serverdir,
                    'image-signing.gpg')
        touch_build(100)
        # Run the state machine until we download the files.
        state = State()
        state.run_thru('download_files')
        # Each path-B file contains its own checksum: Full B, then Delta B.1,
        # then Delta B.2.
        expected = {
            '5.txt': '345', '6.txt': '456', '7.txt': '567',
            '8.txt': '678', '9.txt': '789', 'a.txt': '89a',
            'b.txt': '9ab', 'd.txt': 'fed', 'c.txt': 'edc',
            }
        for filename, contents in expected.items():
            path = os.path.join(config.updater.cache_partition, filename)
            with open(path, encoding='utf-8') as fp:
                self.assertEqual(fp.read(), contents)

    @configuration
    def test_download_winners_bad_checksums(self):
        """Wrong checksums in index.json surface an error at download time."""
        # winner.index_01.json deliberately carries incorrect checksums.
        index_on_server = os.path.join(self._serverdir, self._indexpath)
        copy('winner.index_01.json', self._serverdir, self._indexpath)
        sign(index_on_server, 'image-signing.gpg')
        setup_index('winner.index_01.json', self._serverdir,
                    'image-signing.gpg')
        setup_keyrings()
        state = State()
        touch_build(100)
        # Stop the state machine just before the download step...
        state.run_until('download_files')
        # ...then stepping once more surfaces the checksum failure.
        with self.assertRaises(FileNotFoundError) as cm:
            next(state)
        self.assertIn('HASH ERROR', str(cm.exception))

    @configuration
    def test_download_winners_signed_by_wrong_key(self):
        """Files signed with the wrong key are rejected.

        There is a device key, but the image files are signed by the image
        signing key, which according to the spec means the files are not
        signed correctly.
        """
        setup_keyrings()
        # To set up the device signing key, we need to load this channels.json
        # file and copy the device keyring to the server.
        copy('winner.channels_02.json', self._serverdir, 'channels.json')
        sign(os.path.join(self._serverdir, 'channels.json'),
             'image-signing.gpg')
        setup_keyring_txz(
            'device-signing.gpg', 'image-signing.gpg',
            dict(type='device-signing'),
            os.path.join(self._serverdir, 'stable', 'nexus7', 'device.tar.xz'))
        sign(os.path.join(self._serverdir, self._indexpath),
             'device-signing.gpg')
        # All the downloadable files are now signed with a bogus key.
        setup_index('winner.index_02.json', self._serverdir, 'spare.gpg')
        touch_build(100)
        # Stop the state machine just before the download step.
        state = State()
        state.run_until('download_files')
        # Stepping once more fails signature verification.
        self.assertRaises(SignatureError, next, state)
        # Nothing was downloaded.
        txtfiles = {filename for filename in os.listdir(config.tempdir)
                    if os.path.splitext(filename)[1] == '.txt'}
        self.assertEqual(len(txtfiles), 0)

    @configuration
    def test_no_download_winners_with_missing_signature(self):
        """A single missing .asc signature aborts the entire download.

        Fix: the removed path previously embedded ``'6/7/8.txt.asc'`` as one
        os.path.join() segment; the components are now joined portably,
        matching the sibling bad-signature test.
        """
        setup_keyrings()
        state = State()
        touch_build(100)
        # Remove a signature.
        os.remove(os.path.join(self._serverdir, '6', '7', '8.txt.asc'))
        # Run the state machine to calculate the winning path.
        state.run_until('download_files')
        # The next state transition will fail because of the missing signature.
        self.assertRaises(FileNotFoundError, next, state)
        # There are no downloaded files.
        txtfiles = {filename for filename in os.listdir(config.tempdir)
                    if os.path.splitext(filename)[1] == '.txt'}
        self.assertEqual(len(txtfiles), 0, txtfiles)

    @configuration
    def test_no_download_winners_with_bad_signature(self):
        """One bad file signature makes no downloaded files available."""
        setup_keyrings()
        state = State()
        touch_build(100)
        # Re-sign one data file with the spare key, breaking its signature.
        sign(os.path.join(self._serverdir, '6', '7', '8.txt'), 'spare.gpg')
        # Run the state machine to calculate the winning path.
        state.run_until('download_files')
        # Stepping once more fails signature verification.
        self.assertRaises(SignatureError, next, state)
        # Nothing was downloaded.
        txtfiles = {filename for filename in os.listdir(config.tempdir)
                    if os.path.splitext(filename)[1] == '.txt'}
        self.assertEqual(len(txtfiles), 0)