Example no. 1
    def test_reset(self, client):
        # Read
        s = client.config.get(Filter(names=["my-int-config"]))
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "12"

        # Set new value
        s = client.config.set(
            ConfigUpdate(
                items=[ConfigItemUpdate(name="my-int-config", value="777")]))

        # Read
        s = client.config.get(Filter(names=["my-int-config"]))
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "777"

        # Reset
        s = client.config.reset(Filter(names=["my-int-config"]))

        # Read
        s = client.config.get(Filter(names=["my-int-config"]))
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "12"
Example no. 2
    def test_set_and_reset(self, client):
        # Set and read back configuration
        ln = "updated_logger"

        # Default
        s = client.log.get(Filter(names=[ln]))
        assert len(s.items) == 1
        lc = s.items[0]
        assert lc.name == ln
        assert lc.enabled
        assert lc.level == LoggerLevel.LVL_UNKNOWN
        assert not self.loggers_file.is_file()

        # Set
        s = client.log.set(
            LoggerUpdate(items=[
                LoggerConfig(
                    name=ln, enabled=True, level=LoggerLevel.LVL_WARNING)
            ]))
        lc = s.items[0]
        assert lc.name == ln
        assert lc.enabled
        assert lc.level == LoggerLevel.LVL_WARNING
        assert self.loggers_file.is_file()

        # Verify model
        with self.loggers_file.open("r") as f:
            model = json.load(f)
        assert len(model) == 1
        assert model[ln] == "WARNING"

        # Get
        s = client.log.get(Filter(names=[ln]))
        lc = s.items[0]
        assert lc.name == ln
        assert lc.enabled
        assert lc.level == LoggerLevel.LVL_WARNING

        # Reset
        s = client.log.reset(Filter(names=[ln]))
        lc = s.items[0]
        assert lc.name == ln
        assert lc.enabled
        assert lc.level == LoggerLevel.LVL_UNKNOWN
        assert self.loggers_file.is_file()

        # Verify model
        with self.loggers_file.open("r") as f:
            model = json.load(f)
        assert len(model) == 0

        # Get
        s = client.log.get(Filter(names=[ln]))
        lc = s.items[0]
        assert lc.name == ln
        assert lc.enabled
        assert lc.level == LoggerLevel.LVL_UNKNOWN
Example no. 3
    def test_get_empty(self, client):
        # Try to get items with empty request
        try:
            client.config.get(Filter(names=[""]))
            raise AssertionError("Shouldn't get here")
        except RpcException as e:
            assert e.rc == ResultCode.ERROR_PARAM_MISSING
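A side note on the error-path pattern above (also used in the similar negative tests below): since these tests already rely on pytest fixtures, the try/except + AssertionError construct could equivalently be written with pytest.raises. A minimal sketch, assuming pytest is imported at module level and the same client fixture:

    def test_get_empty(self, client):
        # Equivalent negative-path check written with pytest.raises
        with pytest.raises(RpcException) as exc_info:
            client.config.get(Filter(names=[""]))
        assert exc_info.value.rc == ResultCode.ERROR_PARAM_MISSING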
Example no. 4
    def test_get_unknown(self, client):
        # Try to get unknown item
        try:
            client.config.get(Filter(names=["unknown"]))
            raise AssertionError("Shouldn't get here")
        except RpcException as e:
            assert e.rc == ResultCode.ERROR_ITEM_UNKNOWN
Example no. 5
    def test_get_ok(self, client):
        # Get item
        s = client.config.get(Filter())
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "12"
Example no. 6
    def test_get_missing_params(self, client):
        # Try with bad filters
        try:
            client.log.get(Filter())
            raise AssertionError("shouldn't get here")
        except RpcException as e:
            assert e.rc == ResultCode.ERROR_PARAM_MISSING
Example no. 7
    def test_rolling_logs(self):
        # Verify rolling is working OK with very short rolling interval
        self.workspace_path.mkdir()
        with (self.workspace_path / "config.json").open("w") as f:
            json.dump({
                "rpc-logs-interval-unit": "s",
                "rpc-logs-interval": "1"
            }, f)

        # Create server
        self.new_server_instance()

        # Loop to generate some logs
        init = time.time()
        while time.time() - init < 3:
            self.server.client.log.get(Filter(names=[""]))

        # Shutdown
        self.shutdown_server_instance()

        # Verify that several log files are generated
        log_files = list((self.workspace_path / "logs" /
                          "LogsManager").glob("LogsManager.log*"))
        logging.debug("Found log files:\n" +
                      "\n".join(map(lambda p: p.as_posix(), log_files)))
        assert len(log_files) >= 3
Example no. 8
    def __merged_items(self,
                       names: List[str],
                       check_conflicts: bool = False) -> Dict[str, ConfigItem]:
        # Delegate to all proxied servers + merge with local items
        merged_items = {}
        dump_all_filter = Filter(names=names, ignore_unknown=True)

        # Dump items from proxied clients
        items_dumps = [self.__filter_items(names)]
        for client in self.__proxied_config_clients:
            self.logger.debug(f"Dump items from remote ({client.target_host})")
            items_dumps.append(client.config.get(dump_all_filter).items)

        # Iterate on dumps
        for items_dump in items_dumps:
            # Merge proxied items
            for item in items_dump:
                if item.name in merged_items:
                    if item.value != merged_items[
                            item.name].value and check_conflicts:
                        # Conflict between services; needs to be raised as an error
                        raise RpcException(
                            f"Proxied values conflict for config item {item.name}: {item.value} != {merged_items[item.name].value}",
                            rc=ResultCode.ERROR_ITEM_CONFLICT,
                        )
                    else:
                        self.logger.debug(
                            f"Item {item.name} (value: {item.value}) already merged; keep previous value ({merged_items[item.name].value})"
                        )
                else:
                    # Grab this proxied item
                    merged_items[item.name] = item
                    self.logger.debug(
                        f"Item {item.name} merged (value: {item.value})")
        return merged_items
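The merge rule implemented above boils down to: the first dump providing an item wins, and a later dump with a different value either raises an ERROR_ITEM_CONFLICT (when check_conflicts is set) or is logged and ignored. For illustration only, a standalone sketch of that rule on plain dicts (a hypothetical helper, not part of the code above):

def merge_first_wins(dumps: list, check_conflicts: bool = False) -> dict:
    # dumps: iterable of {name: value} mappings; the first occurrence of a name wins
    merged = {}
    for dump in dumps:
        for name, value in dump.items():
            if name in merged:
                if check_conflicts and merged[name] != value:
                    # Mirrors the ERROR_ITEM_CONFLICT case above
                    raise ValueError(f"Conflict for {name}: {value} != {merged[name]}")
                # Otherwise keep the previously merged value
            else:
                merged[name] = value
    return merged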
Example no. 9
    def test_get_info(self, client):
        # Try a "get info" call
        s = client.srv.info(Filter())
        assert len(s.items) == 5
        info = s.items[0]
        assert info.name == "srv"
        assert info.version == f"grpc-helper:{grpc_helper.__version__}"
        assert info.current_api_version == ServerApiVersion.SERVER_API_CURRENT
        assert info.supported_api_version == ServerApiVersion.SERVER_API_SUPPORTED
        info = s.items[1]
        assert info.name == "config"
        assert info.version == f"grpc-helper:{grpc_helper.__version__}"
        assert info.current_api_version == ConfigApiVersion.CONFIG_API_CURRENT
        assert info.supported_api_version == ConfigApiVersion.CONFIG_API_SUPPORTED
        info = s.items[2]
        assert info.name == "log"
        assert info.version == f"grpc-helper:{grpc_helper.__version__}"
        assert info.current_api_version == LoggerApiVersion.LOGGER_API_CURRENT
        assert info.supported_api_version == LoggerApiVersion.LOGGER_API_SUPPORTED
        info = s.items[3]
        assert info.name == "events"
        assert info.version == f"grpc-helper:{grpc_helper.__version__}"
        assert info.current_api_version == EventApiVersion.EVENT_API_CURRENT
        assert info.supported_api_version == EventApiVersion.EVENT_API_SUPPORTED
        info = s.items[4]
        assert info.name == "sample"
        assert info.version == f"grpc-helper:{grpc_helper.__version__}"
        assert info.current_api_version == SampleApiVersion.SAMPLE_API_CURRENT
        assert info.supported_api_version == SampleApiVersion.SAMPLE_API_SUPPORTED
Example no. 10
    def reset(self, request: Filter) -> ConfigStatus:
        """
        Reset configuration items, according to input filter
        """

        with self.lock:
            # Basic checks
            merged_items = self.__merged_items(request.names, False)
            self.__check_items(request.names, merged_items,
                               request.ignore_unknown)

            # Delegate to proxied servers
            for client in self.__proxied_config_clients:
                self.logger.debug(
                    f"Reset items on remote ({client.target_host})")
                client.config.reset(
                    Filter(names=request.names, ignore_unknown=True))

            # Reset all local items to their default values
            for name in filter(lambda n: n in self.user_items, request.names):
                self.user_items[name].reset()

            # Get again to build returned values
            merged_items = self.__merged_items(request.names, True)
            return ConfigStatus(items=merged_items.values())
Example no. 11
    def test_get_root(self, client):
        # Try with root logger
        s = client.log.get(Filter(names=[""]))
        assert len(s.items) == 1
        lc = s.items[0]
        assert lc.name == "root"
        assert lc.enabled
        assert lc.level == LoggerLevel.LVL_DEBUG  # Because of pytest initialization; in production, should be Warning by default
Example no. 12
    def test_get_custom(self, client):
        # Try with default
        ln = "sample_logger"
        s = client.log.get(Filter(names=[ln]))
        assert len(s.items) == 1
        lc = s.items[0]
        assert lc.name == ln
        assert lc.enabled
        assert lc.level == LoggerLevel.LVL_UNKNOWN

        # Check known level
        logging.getLogger(ln).setLevel(logging.WARNING)
        s = client.log.get(Filter(names=[ln]))
        assert s.items[0].level == LoggerLevel.LVL_WARNING

        # Check unknown level
        logging.getLogger(ln).setLevel(12)
        s = client.log.get(Filter(names=[ln]))
        assert s.items[0].level == LoggerLevel.LVL_UNKNOWN
Example no. 13
    def _proxied_servers(self) -> Set[Tuple[str, int]]:
        # Tuples of remote RPC server host,port for each registered proxied service
        proxied_servers = set()
        for service_info in filter(
                lambda si: si.is_proxy and si.proxy_port > 0,
                self.client.srv.info(Filter()).items):
            proxied_servers.add(
                (service_info.proxy_host if len(service_info.proxy_host) else
                 RpcStaticConfig.MAIN_HOST.str_val, service_info.proxy_port))
        return proxied_servers
Example no. 14
    def test_proxy_config_set_n_reset(self, proxy_server, client,
                                      another_server):
        # Register proxies
        proxy_server.client.srv.proxy_register(
            ProxyRegisterRequest(names=["sample"],
                                 version="123",
                                 port=self.rpc_port))

        # Set in proxy
        s = proxy_server.client.config.set(
            ConfigUpdate(
                items=[ConfigItemUpdate(name="my-int-config", value="789")]))
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "789"

        # Read (all values shall be the same)
        for c in (client, another_server.client, proxy_server.client):
            s = c.config.get(Filter(names=["my-int-config"]))
            assert len(s.items) == 1
            item = s.items[0]
            assert item.name == "my-int-config"
            assert item.value == "789"

        # Reset
        s = proxy_server.client.config.reset(Filter(names=["my-int-config"]))
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "12"

        # Read (all values shall be the same)
        for c in (client, another_server.client, proxy_server.client):
            s = c.config.get(Filter(names=["my-int-config"]))
            assert len(s.items) == 1
            item = s.items[0]
            assert item.name == "my-int-config"
            assert item.value == "12"
Example no. 15
    def test_proxy_config_get(self, proxy_server, client, another_server):
        # Register proxies
        proxy_server.client.srv.proxy_register(
            ProxyRegisterRequest(names=["sample"],
                                 version="123",
                                 port=self.rpc_port))

        # Read (all values shall be the same)
        for c in (client, another_server.client, proxy_server.client):
            s = c.config.get(Filter(names=["my-int-config"]))
            assert len(s.items) == 1
            item = s.items[0]
            assert item.name == "my-int-config"
            assert item.value == "12"
Example no. 16
    def method1(self, request: Empty) -> ResultStatus:
        self.logger.info("In SampleServicer.method1!!!")

        # Sleep if requested
        if self.wait_a_bit:
            time.sleep(3)

        # Use auto-client to access other services (only if not shut down in the meantime)
        s = None
        if not self.is_shutdown:
            s = self.client.srv.info(Filter())

        return ResultStatus(r=Result(
            msg=f"Found info count: {len(s.items) if s is not None else None}")
                            )
Example no. 17
    def __finalize_shutdown(self, terminating_server: Server,
                            request: ShutdownRequest):
        # Wait for all pending requests to be terminated
        self.logger.debug("Waiting to terminate all requests")
        terminating_server.wait_for_termination()
        self.logger.debug(f"RPC server shut down on port {self.__port}")

        # Need to wait before real shutdown?
        # This may be useful to avoid being restarted by an orchestration manager (e.g. Docker Swarm), typically when doing a graceful shutdown before upgrade
        if request is not None and request.timeout >= 0:
            timeout = request.timeout if request.timeout > 0 else RpcStaticConfig.SHUTDOWN_TIMEOUT.int_val
            self.logger.warning(f"!!! Will shutdown in {timeout}s !!!")
            time.sleep(timeout)

        # Hack auto client to remove timeout
        self.client.srv.info.timeout = None

        # Just make sure that client calls are not working anymore with current instance
        # (Sometimes, it appears that the internal implementation is a bit lazy to close...)
        self.logger.debug(
            "Trying a last client call to make sure server socket is closed (following ERROR is normal)"
        )
        while True:
            try:
                # Try a client call
                self.client.srv.info(Filter())

                # Shouldn't get here; if so, wait a bit and retry
                time.sleep(0.2)  # pragma: no cover
            except Exception:
                # Ok, client is closed
                break

        # Remove rotating handlers from all descriptor loggers
        for descriptor in self.__real_descriptors:
            clean_rotating_handler(descriptor.manager.logger)

        # Remove rotating handler for current + root loggers
        clean_rotating_handler(logging.getLogger())
        self.__shutdown_event.set()
Example no. 18
    def test_proxy_config_conflict(self, proxy_server, client, another_server):
        # Register proxies
        proxy_server.client.srv.proxy_register(
            ProxyRegisterRequest(names=["sample"],
                                 version="123",
                                 port=self.rpc_port))

        # Update only on one proxied server
        s = another_server.client.config.set(
            ConfigUpdate(
                items=[ConfigItemUpdate(name="my-int-config", value="1024")]))
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "1024"

        # Read while values are different in proxied servers: conflict
        try:
            proxy_server.client.config.get(Filter(names=["my-int-config"]))
            raise AssertionError("shouldn't get here")
        except RpcException as e:
            assert e.rc == ResultCode.ERROR_ITEM_CONFLICT
Example no. 19
    def test_persisted_level(self):
        # Prepare persisted level file
        self.workspace_path.mkdir(parents=True, exist_ok=True)
        with self.loggers_file.open("w") as f:
            json.dump({
                "logger1": False,
                "logger2": "DEBUG",
                "logger3": "FOO"
            }, f)

        # Load server
        self.new_server_instance()

        # Verify "unknown level" trace
        self.check_logs("Ignoring unknown level FOO for logger logger3")

        # Verify configured level
        c = self.server.client
        names = ["logger1", "logger2", "logger3"]
        s = c.log.get(Filter(names=names))
        assert s.r.code == ResultCode.OK
        assert len(s.items) == len(names)
        for lc in s.items:
            assert lc.name in names
            if lc.name == "logger1":
                assert not lc.enabled
                assert lc.level == LoggerLevel.LVL_UNKNOWN
            if lc.name == "logger2":
                assert lc.enabled
                assert lc.level == LoggerLevel.LVL_DEBUG
            if lc.name == "logger3":
                assert lc.enabled
                assert lc.level == LoggerLevel.LVL_UNKNOWN

        # Shutdown server
        self.shutdown_server_instance()
Example no. 20
    def test_reset_unset(self, client):
        # Resetting unmodified logger should not be persisted
        client.log.reset(Filter(names=["unmodified"]))
        assert not self.loggers_file.is_file()
Example no. 21
    def test_set_ok(self, client):
        # Verify file is not persisted yet
        wks = self.test_folder / "wks"
        cfg = wks / "config.json"
        assert not cfg.is_file()

        # Set new value
        s = client.config.set(
            ConfigUpdate(
                items=[ConfigItemUpdate(name="my-int-config", value="999")]))
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "999"

        # File is persisted
        assert cfg.is_file()

        # Read again to make sure :)
        s = client.config.get(Filter(names=["my-int-config"]))
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "999"

        # Reload to verify persistence
        self.shutdown_server_instance()
        self.new_server_instance()

        # Read again
        s = client.config.get(Filter(names=["my-int-config"]))
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "999"

        # Reload to verify that persistence is ignored when there is no workspace (but update the logs folder anyway)
        self.shutdown_server_instance()
        os.environ["RPC_LOGS_FOLDER"] = (self.test_folder /
                                         "custom_log_full_path").as_posix()
        self.new_server_instance(with_workspace=False)
        cfg.unlink()

        # Set new value; will not be persisted
        s = client.config.set(
            ConfigUpdate(
                items=[ConfigItemUpdate(name="my-int-config", value="888")]))
        assert not cfg.is_file()

        # Write an invalid persisted value
        with cfg.open("w") as f:
            json.dump({"my-int-config": "invalid string"}, f)

        # Reload to verify that the invalid value is ignored (and restore the default logs folder)
        self.shutdown_server_instance()
        del os.environ["RPC_LOGS_FOLDER"]
        self.new_server_instance()
        self.check_logs(
            "Can't load invalid persisted value 'invalid string' for config item my-int-config"
        )

        # Read again (should be default value)
        s = client.config.get(Filter(names=["my-int-config"]))
        assert len(s.items) == 1
        item = s.items[0]
        assert item.name == "my-int-config"
        assert item.value == "12"
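Judging from the invalid-value write above, the persisted config.json appears to be a flat JSON object mapping item names to their string values. A hedged sketch of reading it back, which could sit right after the first persistence assertion in this test (the exact layout beyond that name-to-string mapping is an assumption):

        # Assumed flat {item-name: string-value} layout, mirroring the invalid-value write above
        with cfg.open("r") as f:
            persisted = json.load(f)
        assert persisted.get("my-int-config") == "999"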
Example no. 22
    def test_proxy_register_n_forget(self, proxy_server, client):
        # First list
        s = proxy_server.client.srv.info(Filter(names=["sample"]))
        info = s.items[0]
        assert info.name == "sample"
        assert info.is_proxy
        assert info.proxy_host == ""
        assert info.proxy_port == 0
        assert info.version == f"grpc-helper:{grpc_helper.__version__}"

        # No persistence
        proxy_config = self.proxy_workspace / "proxy.json"
        assert not proxy_config.exists()

        # Register
        proxy_server.client.srv.proxy_register(
            ProxyRegisterRequest(names=["sample"],
                                 version="123",
                                 port=self.rpc_port,
                                 host="localhost"))

        # Verify persistence
        assert proxy_config.exists()
        with proxy_config.open("r") as f:
            model = json.load(f)
        assert "sample" in model

        # List again
        s = proxy_server.client.srv.info(Filter(names=["sample"]))
        info = s.items[0]
        assert info.name == "sample"
        assert info.is_proxy
        assert info.proxy_host == "localhost"
        assert info.proxy_port == self.rpc_port
        assert info.version == "123"

        # Try a simple call
        s = proxy_server.client.sample.method1(Empty())
        assert s.r.msg == "Found info count: 5"

        # Shutdown / reload to verify persistence
        proxy_server.shutdown()
        proxy_server = self.new_proxy_server()

        # Try a simple call again
        s = proxy_server.client.sample.method1(Empty())
        assert s.r.msg == "Found info count: 5"

        # Forget
        proxy_server.client.srv.proxy_forget(Filter(names=["sample"]))

        # Verify persistence
        assert proxy_config.exists()
        with proxy_config.open("r") as f:
            model = json.load(f)
        assert len(model) == 0

        # List again
        s = proxy_server.client.srv.info(Filter(names=["sample"]))
        info = s.items[0]
        assert info.name == "sample"
        assert info.is_proxy
        assert info.proxy_host == ""
        assert info.proxy_port == 0
        assert info.version == "123"
Example no. 23
    def _shutdown(self):
        # Forget from proxy (if it was registered)
        if self.proxy_client is not None:
            self.proxy_client.srv.proxy_forget(
                Filter(names=self._proxied_services()))