Example #1
0
 def get_class_wise_distance_series(self, distance: str):
     """Compute the distance between predicted and observed data for each
     class, i.e. column by column.

     :param distance: name of the distance metric, resolved through
         ``distance_func_numpy``.
     :return: list with one distance value per class.
     """
     compute = distance_func_numpy(distance)
     predicted = self.__X_hat
     observed = self.__dataset.X
     series = []
     for column in range(self.n_classes):
         series.append(compute(predicted[:, column], observed[:, column]))
     return series
Example #2
0
 def get_sample_wise_distance_series(self, distance: str):
     """Compute the distance between predicted and observed data for each
     sample, i.e. row by row.

     :param distance: name of the distance metric, resolved through
         ``distance_func_numpy``.
     :return: list with one distance value per sample.
     """
     compute = distance_func_numpy(distance)
     predicted = self.__X_hat
     observed = self.__dataset.X
     return [
         compute(predicted[row, :], observed[row, :])
         for row in range(self.n_samples)
     ]
Example #3
0
 def get_distance(self, distance: str):
     """Return the distance between the fitted mixed distribution and the
     sample's observed distribution.

     :param distance: name of the distance metric, resolved through
         ``distance_func_numpy``.
     :return: the scalar distance value.
     """
     distance_func = distance_func_numpy(distance)
     distribution = BaseDistribution.get_distribution(
         self.__distribution_type, self.__n_components)
     # Evaluate the fitted mixture on the sample's size classes.
     values = distribution.mixed_function(self.__sample.classes_φ,
                                          *self.__mixed_func_args)
     targets = self.__sample.distribution
     # Fix: do not reuse the `distance` parameter name for the result —
     # shadowing the argument obscured which object was which.
     return distance_func(values, targets)
Example #4
0
 def get_distance_series(self, distance: str):
     """Return the distance between the reconstructed matrix and the
     observed data at every step recorded in the fitting history.

     :param distance: name of the distance metric, resolved through
         ``distance_func_numpy``.
     :return: list of distances, one per history entry.
     """
     compute = distance_func_numpy(distance)
     observed = self.__dataset.X
     return [
         compute(fractions @ end_members, observed)
         for fractions, end_members in self.__history
     ]
Example #5
0
 def get_distance_series(self, distance: str):
     """Return the distance between the fitted mixture and the sample's
     observed distribution at every step of the fitting history.

     :param distance: name of the distance metric, resolved through
         ``distance_func_numpy``.
     :return: list of distances, one per entry in ``self.__history``.
     """
     distance_func = distance_func_numpy(distance)
     distribution = BaseDistribution.get_distribution(
         self.__distribution_type, self.__n_components)
     # Hoisted out of the loop: the target distribution is loop-invariant.
     targets = self.__sample.distribution
     distance_series = []
     for func_args in self.__history:
         values = distribution.mixed_function(self.__sample.classes_φ,
                                              *func_args)
         # Fix: the result no longer shadows the `distance` parameter.
         distance_series.append(distance_func(values, targets))
     return distance_series
Example #6
0
    def try_fit(self, task: SSUTask) -> typing.Tuple[FittingState, object]:
        """Fit an SSU task with the classic scipy-based resolver.

        Optionally runs a basin-hopping global optimization (GO) first,
        then always runs a final local optimization (FLO) from the best
        known initial guess.  Resolver lifecycle hooks
        (``on_fitting_started`` etc.) are emitted along the way.

        :param task: the task to fit; ``task.resolver`` must be "classic".
        :return: a ``(FittingState, result)`` tuple — ``result`` is an
            ``SSUResult`` on success, otherwise the raw scipy result.
        """
        assert task.resolver == "classic"
        # Every parameter vector visited by the local minimizer is recorded
        # here and stored in the final SSUResult.
        history = []
        distribution = BaseDistribution.get_distribution(
            task.distribution_type, task.n_components)
        if task.resolver_setting is None:
            setting = ClassicResolverSetting()
        else:
            assert isinstance(task.resolver_setting, ClassicResolverSetting)
            setting = task.resolver_setting
        distance = distance_func_numpy(setting.distance)
        start_time = time.time()
        self.on_fitting_started()
        # NOTE(review): weighting is hard-disabled; the weighted branch
        # below is currently dead code, apparently kept for experimentation.
        use_weights = False
        if use_weights:
            weights = self.get_weights(task.sample.classes_φ,
                                       task.sample.distribution)

            def closure(params):
                # Objective for scipy.  Mutates `params` in place: the
                # trailing n_components entries are forced non-negative.
                params[-task.n_components:] = np.abs(
                    params[-task.n_components:])
                current_values = distribution.mixed_function(
                    task.sample.classes_φ, *params)
                return distance(current_values * weights,
                                task.sample.distribution * weights)
        else:

            def closure(params):
                # Objective for scipy.  Mutates `params` in place: the
                # trailing n_components entries are forced non-negative.
                params[-task.n_components:] = np.abs(
                    params[-task.n_components:])
                current_values = distribution.mixed_function(
                    task.sample.classes_φ, *params)
                return distance(current_values, task.sample.distribution)

        def local_callback(mixed_func_args, *addtional):
            # Per-iteration hook passed to scipy: record the visited
            # parameters and forward them to the resolver's callback.
            history.append(mixed_func_args)
            self.local_iteration_callback(mixed_func_args)

        initial_guess = task.initial_guess
        if task.initial_guess is None:
            initial_guess = np.array(distribution.defaults)

        # A reference (one entry per component) takes precedence over any
        # explicit initial guess.
        if task.reference is not None:
            assert len(task.reference) == task.n_components
            initial_guess = BaseDistribution.get_initial_guess(
                task.distribution_type, task.reference)

        # "trust-constr" does not accept an `ftol` option, hence the split.
        if setting.minimizer == "trust-constr":
            GO_options = {
                "maxiter": setting.GO_minimizer_max_niter,
                #    "ftol": setting.GO_minimizer_ftol,
                "disp": False
            }
            FLO_options = {
                "maxiter": setting.FLO_max_niter,
                #    "ftol": setting.FLO_ftol,
                "disp": False
            }
        else:
            GO_options = {
                "maxiter": setting.GO_minimizer_max_niter,
                "ftol": setting.GO_minimizer_ftol,
                "disp": False
            }
            FLO_options = {
                "maxiter": setting.FLO_max_niter,
                "ftol": setting.FLO_ftol,
                "disp": False
            }

        # Optional global optimization: basin-hopping around the local
        # minimizer; its best point becomes the FLO's initial guess.
        if setting.try_GO:
            global_optimization_minimizer_kwargs = \
                dict(method=setting.minimizer,
                     tol=setting.GO_minimizer_tol,
                     bounds=distribution.bounds,
                     constraints=distribution.constrains,
                     callback=local_callback,
                     options=GO_options)

            GO_result = \
                basinhopping(closure, x0=initial_guess,
                            minimizer_kwargs=global_optimization_minimizer_kwargs,
                            callback=self.global_iteration_callback,
                            niter_success=setting.GO_success_niter,
                            niter=setting.GO_max_niter,
                            stepsize=setting.GO_step)

            # status 9 (SLSQP "iteration limit reached") is treated as an
            # acceptable outcome, not a failure.
            if GO_result.lowest_optimization_result.success or \
                    GO_result.lowest_optimization_result.status == 9:
                self.on_global_fitting_succeeded(GO_result)
                initial_guess = GO_result.x
            else:
                self.on_global_fitting_failed(GO_result)
                self.on_fitting_finished()
                return FittingState.Failed, GO_result

        # Final local optimization from the (possibly GO-refined) guess.
        FLO_result = \
            minimize(closure, method=setting.minimizer,
                    x0=initial_guess,
                    tol=setting.FLO_tol,
                    bounds=distribution.bounds,
                    constraints=distribution.constrains,
                    callback=local_callback,
                    options=FLO_options)
        # judge if the final fitting succeed
        # see https://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.fmin_slsqp.html
        # When the minimizer is "Nelder-Mead", it will return failed result if it has reached the max niter
        if FLO_result.success or FLO_result.status == 9 or setting.minimizer == "Nelder-Mead" or setting.minimizer == "trust-constr":
            finish_time = time.time()
            self.on_fitting_finished()
            time_spent = finish_time - start_time
            fitting_result = SSUResult(task,
                                       FLO_result.x,
                                       history=history,
                                       time_spent=time_spent)
            self.on_fitting_succeeded(FLO_result, fitting_result)
            return FittingState.Succeeded, fitting_result
        else:
            self.on_final_fitting_failed(FLO_result)
            self.on_fitting_finished()
            return FittingState.Failed, FLO_result
Example #7
0
 def get_distance(self, distance: str):
     """Return the overall distance between the predicted matrix and the
     observed dataset.

     :param distance: name of the distance metric, resolved through
         ``distance_func_numpy``.
     """
     compute = distance_func_numpy(distance)
     predicted = self.__X_hat
     observed = self.__dataset.X
     return compute(predicted, observed)