Example #1
    def check_instantiation(self):
        """
        Runs a series of checks to ensure that the user has not supplied
        contradictory arguments that would result in undefined behavior
        when multiple sets of hyperparameters are in use.
        :return:
        """

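        # if no logger was configured, fall back to a default stream logger,
        # or derive the logger name from the output file's basename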
        if self.logger_name is None:
            if self.output is None:
                self.logger_name = self.name + "GaussianProcess"
                set_logger(
                    self.logger_name,
                    stream=True,
                    fileout_name=None,
                    verbose=self.verbose,
                )
            else:
                self.logger_name = self.output.basename + "log"
        logger = logging.getLogger(self.logger_name)

        # check whether a GP with this name has been loaded before: the global
        # label dicts are compared by object identity, not by array equality
        loaded = False
        if self.name + "_0" in _global_training_labels:
            if (_global_training_labels.get(self.name + "_0", None)
                    is not self.training_labels_np):
                loaded = True
        if self.name + "_0" in _global_energy_labels:
            if (_global_energy_labels.get(self.name + "_0", None)
                    is not self.energy_labels_np):
                loaded = True

        if loaded:

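            # name collision: sleep for a random sub-second interval (presumably
            # to stagger concurrent processes) and probe numbered suffixes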
            base = f"{self.name}"
            count = 2
            while self.name + "_0" in _global_training_labels and count < 100:
                time.sleep(random())
                self.name = f"{base}_{count}"
                logger.debug("Specified GP name is present in global memory; "
                             "Attempting to rename the "
                             f"GP instance to {self.name}")
                count += 1
            if self.name + "_0" in _global_training_labels:
                milliseconds = int(round(time.time() * 1000) % 10000000)
                self.name = f"{base}_{milliseconds}"
                logger.debug(
                    "Specified GP name still present in global memory: "
                    f"renaming the GP instance to {self.name}")
            logger.debug(f"Final name of the GP instance is {self.name}")

        self.sync_data()

        self.hyps_mask = Parameters.check_instantiation(
            self.hyps, self.cutoffs, self.kernels, self.hyps_mask)
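
The renaming loop above is a generic collision-avoidance pattern: probe
numbered suffixes, with a random back-off between attempts, then fall back to
a millisecond timestamp. Below is a minimal, self-contained sketch of the same
idea; the registry dict and the pick_unique_name helper are illustrative
stand-ins, not part of the class above:

    import time
    from random import random

    registry = {"gp": object(), "gp_2": object()}  # stand-in for the global label dict

    def pick_unique_name(base, registry, max_tries=100):
        # Probe numbered suffixes, sleeping a random sub-second interval
        # between attempts, then fall back to a millisecond timestamp.
        name = base
        count = 2
        while name in registry and count < max_tries:
            time.sleep(random())
            name = f"{base}_{count}"
            count += 1
        if name in registry:
            name = f"{base}_{int(round(time.time() * 1000) % 10000000)}"
        return name

    print(pick_unique_name("gp", registry))  # -> "gp_3"
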
Example #2
    def check_instantiation(self):
        """
        Runs a series of checks to ensure that the user has not supplied
        contradictory arguments that would result in undefined behavior
        when multiple sets of hyperparameters are in use.
        :return:
        """
        logger = logging.getLogger(self.logger_name)

        # check whether a GP with this name has been loaded before: the global
        # label dicts are compared by object identity, not by array equality
        loaded = False
        if self.name in _global_training_labels:
            if _global_training_labels.get(self.name,
                                           None) is not self.training_labels_np:
                loaded = True
        if self.name in _global_energy_labels:
            if _global_energy_labels.get(self.name,
                                         None) is not self.energy_labels_np:
                loaded = True

        if loaded:

            base = self.name
            count = 2
            while self.name in _global_training_labels and count < 100:
                time.sleep(random())
                self.name = f'{base}_{count}'
                logger.debug("Specified GP name is present in global memory; "
                             "Attempting to rename the "
                             f"GP instance to {self.name}")
                count += 1
            if self.name in _global_training_labels:
                milliseconds = int(round(time.time() * 1000) % 10000000)
                self.name = f"{base}_{milliseconds}"
                logger.debug(
                    "Specified GP name still present in global memory: "
                    f"renaming the GP instance to {self.name}")
            logger.debug(f"Final name of the GP instance is {self.name}")

        self.sync_data()

        self.hyps_mask = Parameters.check_instantiation(
            hyps=self.hyps, cutoffs=self.cutoffs, kernels=self.kernels,
            param_dict=self.hyps_mask)

        self.bounds = deepcopy(self.hyps_mask.get('bounds', None))
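
Both examples detect a prior load with "is not", i.e. object identity rather
than value equality: two GP instances can hold label arrays with equal
contents, but only the instance that registered its labels stores the very
same object in the global dict. A short illustration of why that matters (the
registry and arrays here are stand-ins, not the class's actual state):

    import numpy as np

    registry = {}
    mine = np.zeros(3)
    theirs = np.zeros(3)  # equal contents, but a different object

    registry["gp"] = mine
    print(registry.get("gp") is not mine)    # False: this instance owns "gp"
    print(registry.get("gp") is not theirs)  # True: collision despite equal values

Example #2 additionally deep-copies the 'bounds' entry out of the returned
hyps_mask, so later mutation of one structure cannot silently affect the other.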