Code Example #1
    def __init__(self, grpc_channel=None, logger=None):
        self.logger = logger if logger is not None else create_logger(
            "BayesianOptimizerFactory")
        self._grpc_channel = grpc_channel
        self._optimizer_service_stub = None
        if self._grpc_channel is not None:
            self._optimizer_service_stub = OptimizerServiceStub(
                channel=self._grpc_channel)
Code Example #2
    def __init__(self, grpc_channel, logger=None):
        self.logger = logger if logger is not None else create_logger(
            "OptimizerMonitor")
        self._grpc_channel = grpc_channel
        self._optimizer_service_stub = OptimizerServiceStub(
            channel=self._grpc_channel)
        self._optimizer_factory = BayesianOptimizerFactory(
            grpc_channel=self._grpc_channel, logger=self.logger)
Code Example #3
    def __init__(
            self,
            grpc_channel,
            optimization_problem,
            optimizer_config,
            id,  # pylint: disable=redefined-builtin
            logger=None
    ):
        if logger is None:
            logger = create_logger("BayesianOptimizerClient")
        self.logger = logger

        OptimizerBase.__init__(self, optimization_problem)
        assert optimizer_config is not None

        self._grpc_channel = grpc_channel
        self._optimizer_stub = OptimizerServiceStub(self._grpc_channel)
        self._optimizer_monitoring_stub = OptimizerMonitoringServiceStub(self._grpc_channel)
        self.optimizer_config = optimizer_config
        self.id = id
Code Example #4
class OptimizerMonitor:
    """ Enables monitoring optimizers existing within the OptimizerMicroservice.

    """
    def __init__(self, grpc_channel, logger=None):
        self.logger = logger if logger is not None else create_logger(
            "OptimizerMonitor")
        self._grpc_channel = grpc_channel
        self._optimizer_service_stub = OptimizerServiceStub(
            channel=self._grpc_channel)
        self._optimizer_factory = BayesianOptimizerFactory(
            grpc_channel=self._grpc_channel, logger=self.logger)

    def get_existing_optimizers(self):
        """ Returns proxies to all existing optimizers.

        :return:
        """
        request = Empty()
        optimizer_list = self._optimizer_service_stub.ListExistingOptimizers(
            request)

        optimizer_proxies = [
            self._optimizer_factory.connect_to_existing_optimizer(
                optimizer_info) for optimizer_info in optimizer_list.Optimizers
        ]
        return optimizer_proxies

    def get_optimizer_by_id(self, optimizer_id):
        """ Returns a proxy to an optimizer with a specified Id.

        :param optimizer_id:
        :return:
        """
        optimizer_handle = OptimizerHandle(Id=optimizer_id)
        optimizer_info = self._optimizer_service_stub.GetOptimizerInfo(
            optimizer_handle)
        optimizer_proxy = self._optimizer_factory.connect_to_existing_optimizer(
            optimizer_info)
        return optimizer_proxy
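
A minimal usage sketch for the monitor above (not from the source): the service address and the grpc.insecure_channel call are assumptions for illustration, and only the OptimizerMonitor methods shown in this example are relied upon.

# Hypothetical usage; "localhost:50051" is an assumed endpoint.
import grpc

channel = grpc.insecure_channel("localhost:50051")
monitor = OptimizerMonitor(grpc_channel=channel)

# Proxies for every optimizer currently hosted by the microservice.
for optimizer in monitor.get_existing_optimizers():
    print(optimizer.id)

# Or attach to a single optimizer when its Id is already known.
# optimizer = monitor.get_optimizer_by_id("<optimizer-id>")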
Code Example #5
    def test_echo(self):
        optimizer_service_stub = OptimizerServiceStub(channel=self.optimizer_service_channel)
        response = optimizer_service_stub.Echo(Empty())
        self.assertTrue(isinstance(response, Empty))
Code Example #6
class BayesianOptimizerFactory:
    """Produces BayesianOptimizerProxies either by connecting them to either a new or an existing remote bayesian optimizer.

    Parameters
    ----------
    grpc_channel : grpc.Channel
        gRPC channel used to communicate with the optimizer service.

    logger : Logger
        Logger object
    """
    def __init__(self, grpc_channel, logger=None):
        self.logger = logger if logger is not None else create_logger(
            "BayesianOptimizerFactory")
        self._grpc_channel = grpc_channel
        self._optimizer_service_stub = OptimizerServiceStub(
            channel=self._grpc_channel)

    def create_remote_optimizer(self,
                                optimization_problem: OptimizationProblem,
                                optimizer_config: Point = None):
        """Creates a remote optimizer over a given problem with a given config.

        Parameters
        ----------
        optimization_problem : OptimizationProblem
            Optimization problem for the new remote optimizer.

        optimizer_config : Point
            Configuration for the new remote optimizer.


        Returns
        -------
        BayesianOptimizerProxy

        """
        if optimizer_config is None:
            optimizer_config = BayesianOptimizerConfig.DEFAULT

        create_optimizer_request = CreateOptimizerRequest(
            OptimizationProblem=optimization_problem.to_protobuf(),
            OptimizerConfigName='',  # TODO: add this functionality
            OptimizerConfig=optimizer_config.to_json())

        optimizer_handle = self._optimizer_service_stub.CreateOptimizer(
            create_optimizer_request)

        return BayesianOptimizerProxy(
            grpc_channel=self._grpc_channel,
            optimization_problem=optimization_problem,
            optimizer_config=optimizer_config,
            id=optimizer_handle.Id,
            logger=self.logger)

    def connect_to_existing_optimizer(self, optimizer_info: OptimizerInfo):
        """Connects to an existing optimizer.

        Parameters
        ----------
        optimizer_info : OptimizerInfo

        Returns
        -------
        BayesianOptimizerProxy
        """
        return BayesianOptimizerProxy(
            grpc_channel=self._grpc_channel,
            optimization_problem=OptimizationProblem.from_protobuf(
                optimizer_info.OptimizationProblem),
            optimizer_config=Point.from_json(
                optimizer_info.OptimizerConfigJsonString),
            id=optimizer_info.OptimizerHandle.Id,
            logger=self.logger)
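
As a rough illustration of how this factory might be driven (a sketch, not from the source): it assumes a running optimizer microservice reachable at "localhost:50051" and a pre-built OptimizationProblem named problem; both are placeholders.

# Hypothetical usage; `problem` stands in for an OptimizationProblem built elsewhere.
import grpc

channel = grpc.insecure_channel("localhost:50051")  # assumed endpoint
factory = BayesianOptimizerFactory(grpc_channel=channel)

# Ask the microservice to create a new optimizer and receive a proxy to it.
optimizer = factory.create_remote_optimizer(optimization_problem=problem)
print(optimizer.id)  # Id assigned by the microservice via the returned handle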
Code Example #7
class BayesianOptimizerFactory:
    """Produces BayesianOptimizerProxies either by connecting them to either a new or an existing remote bayesian optimizer.

    Parameters
    ----------
    grpc_channel : grpc.Channel
        gRPC channel used to communicate with the optimizer service.

    logger : Logger
        Logger object
    """

    def __init__(self, grpc_channel=None, logger=None):
        self.logger = logger if logger is not None else create_logger("BayesianOptimizerFactory")
        self._grpc_channel = grpc_channel
        self._optimizer_service_stub = None
        if self._grpc_channel is not None:
            self._optimizer_service_stub = OptimizerServiceStub(channel=self._grpc_channel)


    def create_local_optimizer(self, optimization_problem: OptimizationProblem, optimizer_config: Point = None) -> BayesianOptimizer:
        """Creates an in-process BayesianOptimizer over the given problem with the given config."""
        if optimizer_config is None:
            self.logger.info(f"Optimizer config not specified. Using default.")
            optimizer_config = bayesian_optimizer_config_store.default

        self.logger.info(f"Creating a bayesian optimizer with config: {optimizer_config.to_json(indent=2)}.")

        return BayesianOptimizer(
            optimization_problem=optimization_problem,
            optimizer_config=optimizer_config,
            logger=self.logger
        )


    def create_remote_optimizer(self, optimization_problem: OptimizationProblem, optimizer_config: Point = None) -> BayesianOptimizerProxy:
        """Creates a remote optimizer over a given problem with a given config.

        Parameters
        ----------
        optimization_problem : OptimizationProblem
            Optimization problem for the new remote optimizer.

        optimizer_config : Point
            Configuration for the new remote optimizer.


        Returns
        -------
        BayesianOptimizerProxy

        """
        assert self._optimizer_service_stub is not None

        if optimizer_config is None:
            optimizer_config = bayesian_optimizer_config_store.default

        create_optimizer_request = CreateOptimizerRequest(
            OptimizationProblem=OptimizerServiceEncoder.encode_optimization_problem(optimization_problem),
            OptimizerConfigName='', # TODO: add this functionality
            OptimizerConfig=optimizer_config.to_json()
        )

        optimizer_handle = self._optimizer_service_stub.CreateOptimizer(create_optimizer_request)
        self.logger.info(f"Created bayesian optimizer with id: {optimizer_handle.Id} with config: {optimizer_config.to_json(indent=2)}.")

        return BayesianOptimizerProxy(
            grpc_channel=self._grpc_channel,
            optimization_problem=optimization_problem,
            optimizer_config=optimizer_config,
            id=optimizer_handle.Id,
            logger=self.logger
        )

    def connect_to_existing_remote_optimizer(self, optimizer_info: OptimizerInfo) -> BayesianOptimizerProxy:
        """Connects to an existing optimizer.

        Parameters
        ----------
        optimizer_info : OptimizerInfo

        Returns
        -------
        BayesianOptimizerProxy
        """
        return BayesianOptimizerProxy(
            grpc_channel=self._grpc_channel,
            optimization_problem=OptimizerServiceDecoder.decode_optimization_problem(optimizer_info.OptimizationProblem),
            optimizer_config=Point.from_json(optimizer_info.OptimizerConfigJsonString),
            id=optimizer_info.OptimizerHandle.Id,
            logger=self.logger
        )
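
The newer factory adds an in-process path alongside the gRPC one. A short sketch of choosing between them (again assumption-based: `problem` is a pre-built OptimizationProblem and `channel` an optional grpc.Channel, both placeholders):

# Hypothetical usage; `problem` and `channel` are placeholders, not from the snippet above.
factory = BayesianOptimizerFactory(grpc_channel=channel)

if channel is None:
    # No microservice available: run the optimizer in-process.
    optimizer = factory.create_local_optimizer(optimization_problem=problem)
else:
    # Delegate to the remote optimizer microservice over gRPC.
    optimizer = factory.create_remote_optimizer(optimization_problem=problem)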
Code Example #8
class BayesianOptimizerProxy(OptimizerBase):
    """ Client to remote BayesianOptimizer.

    Wraps all implementation details around communicating with the remote BayesianOptimizer.
    Benefits:
        * Simpler to use than making gRPC requests
        * We can change the gRPC definition without affecting the user's code.
        * All logic related to gRPC is in one place

    Parameters
    ----------
    grpc_channel : grpc.Channel
        gRPC channel used to connect to the existing remote optimizer.
    optimization_problem : OptimizationProblem
        Problem to optimize.
    optimizer_config : Point
        Optimizer configuration.
    id : str
        Unique identifying string.
    logger : logger, default=None
        Logger to use. By default, a new logger is created internally.
    """

    def __init__(
            self,
            grpc_channel,
            optimization_problem,
            optimizer_config,
            id,  # pylint: disable=redefined-builtin
            logger=None
    ):
        if logger is None:
            logger = create_logger("BayesianOptimizerClient")
        self.logger = logger

        OptimizerBase.__init__(self, optimization_problem)
        assert optimizer_config is not None

        self._grpc_channel = grpc_channel
        self._optimizer_stub = OptimizerServiceStub(self._grpc_channel)
        self._optimizer_monitoring_stub = OptimizerMonitoringServiceStub(self._grpc_channel)
        self.optimizer_config = optimizer_config
        self.id = id

    @property
    def optimizer_handle_for_optimizer_monitoring_service(self):
        return OptimizerMonitoringService_pb2.OptimizerHandle(Id=self.id)

    @property
    def optimizer_handle_for_optimizer_service(self):
        return OptimizerService_pb2.OptimizerHandle(Id=self.id)

    @property
    def trained(self):
        response = self._optimizer_monitoring_stub.IsTrained(self.optimizer_handle_for_optimizer_monitoring_service)
        return response.Value

    @trace()
    def get_optimizer_convergence_state(self):
        optimizer_convergence_state_response = self._optimizer_monitoring_stub.GetOptimizerConvergenceState(
            self.optimizer_handle_for_optimizer_monitoring_service
        )
        return deserialize_from_bytes_string(optimizer_convergence_state_response.SerializedOptimizerConvergenceState)

    @trace()
    def compute_surrogate_model_goodness_of_fit(self):
        response = self._optimizer_monitoring_stub.ComputeGoodnessOfFitMetrics(self.optimizer_handle_for_optimizer_monitoring_service)
        return MultiObjectiveGoodnessOfFitMetrics.from_json(response.Value, objective_names=self.optimization_problem.objective_space.dimension_names)

    @trace()
    def suggest(self, random=False, context=None):  # pylint: disable=unused-argument
        if context is not None:
            raise NotImplementedError("Context not currently supported on remote optimizers")

        suggestion_request = OptimizerService_pb2.SuggestRequest(
            OptimizerHandle=self.optimizer_handle_for_optimizer_service,
            Random=random,
            Context=context
        )
        suggestion_response = self._optimizer_stub.Suggest(suggestion_request)
        suggested_params_dict = json.loads(suggestion_response.ParametersJsonString)
        return Point(**suggested_params_dict)

    @trace()
    def register(self, parameter_values_pandas_frame, target_values_pandas_frame, context_values_pandas_frame=None):
        if context_values_pandas_frame is not None:
            raise NotImplementedError("Context not currently supported on remote optimizers")

        feature_values_pandas_frame = parameter_values_pandas_frame
        register_request = OptimizerService_pb2.RegisterObservationsRequest(
            OptimizerHandle=self.optimizer_handle_for_optimizer_service,
            Observations=OptimizerService_pb2.Observations(
                Features=OptimizerService_pb2.Features(FeaturesJsonString=feature_values_pandas_frame.to_json(orient='index', double_precision=15)),
                ObjectiveValues=OptimizerService_pb2.ObjectiveValues(
                    ObjectiveValuesJsonString=target_values_pandas_frame.to_json(orient='index', double_precision=15)
                )
            )
        )
        self._optimizer_stub.RegisterObservations(register_request)

    @trace()
    def get_all_observations(self) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
        response = self._optimizer_monitoring_stub.GetAllObservations(self.optimizer_handle_for_optimizer_monitoring_service)
        features_df = pd.read_json(response.Features.FeaturesJsonString, orient='index')
        objectives_df = pd.read_json(response.ObjectiveValues.ObjectiveValuesJsonString, orient='index')
        context_df = None
        return features_df, objectives_df, context_df

    @trace()
    def predict(self, parameter_values_pandas_frame, t=None, context_values_pandas_frame=None, objective_name=None) -> Prediction:  # pylint: disable=unused-argument
        # TODO: make this streaming and/or using arrow.
        #
        if context_values_pandas_frame is not None:
            raise NotImplementedError("Context not currently supported on remote optimizers")
        feature_values_dict = parameter_values_pandas_frame.to_dict(orient='list')
        prediction_request = OptimizerMonitoringService_pb2.PredictRequest(
            OptimizerHandle=self.optimizer_handle_for_optimizer_monitoring_service,
            Features=OptimizerMonitoringService_pb2.Features(
                FeaturesJsonString=json.dumps(feature_values_dict)
            )
        )
        prediction_response = self._optimizer_monitoring_stub.Predict(prediction_request)

        # To be compliant with the OptimizerBase, we need to recover a single Prediction object and return it.
        #
        objective_predictions_pb2 = prediction_response.ObjectivePredictions
        assert len(objective_predictions_pb2) == 1
        only_prediction_pb2 = objective_predictions_pb2[0]
        objective_name = only_prediction_pb2.ObjectiveName
        valid_predictions_df = Prediction.dataframe_from_json(only_prediction_pb2.PredictionDataFrameJsonString)
        prediction = Prediction.create_prediction_from_dataframe(objective_name=objective_name, dataframe=valid_predictions_df)
        return prediction

    def focus(self, subspace):  # pylint: disable=unused-argument,no-self-use
        pass

    def reset_focus(self):  # pylint: disable=no-self-use
        pass
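
To make the proxy's role concrete, here is a rough suggest/evaluate/register loop (a sketch, not from the source): `optimizer` is a BayesianOptimizerProxy, `evaluate` is a user-supplied objective function, and the objective name "y" and the loop length are placeholders.

# Hypothetical optimization loop built only on suggest() and register() from the proxy above.
import json
import pandas as pd

for _ in range(10):
    suggestion = optimizer.suggest()                     # Point holding suggested parameter values
    objective_value = evaluate(suggestion)               # user-defined, returns a float
    params_df = pd.DataFrame([json.loads(suggestion.to_json())])
    targets_df = pd.DataFrame([{"y": objective_value}])  # "y" is a placeholder objective name
    optimizer.register(
        parameter_values_pandas_frame=params_df,
        target_values_pandas_frame=targets_df
    )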