Example #1
def save_interim_potential(basis_config: BBasisConfiguration, coeffs=None, potential_filename="interim_potential.yaml",
                           verbose=True):
    if coeffs is not None:
        basis_config = basis_config.copy()
        safely_update_bbasisconfiguration_coefficients(coeffs, basis_config)
    basis_config.metadata["intermediate_time"] = str(datetime.now())
    basis_config.save(potential_filename)
    if verbose:
        log.info('Intermediate potential saved in {}'.format(potential_filename))
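
A minimal usage sketch of save_interim_potential follows; it assumes BBasisConfiguration can be constructed directly from a YAML potential file (as exposed by pyace), and the file names and the coefficient source here are illustrative only.

from pyace import BBasisConfiguration  # assuming the pyace top-level export

# load an existing potential (hypothetical file name)
basis = BBasisConfiguration("potential.yaml")

# illustrative source of updated coefficients, e.g. taken from an optimizer step
new_coeffs = basis.get_all_coeffs()

# write an interim snapshot of the potential with the updated coefficients
save_interim_potential(basis, coeffs=new_coeffs,
                       potential_filename="interim_potential.yaml")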
Example #2
    def callback_hook(self, coeffs, basis_config: BBasisConfiguration, current_fit_cycle: int,
                      current_ladder_step: int):
        # TODO add a list of callbacks

        basis_config = basis_config.copy()
        safely_update_bbasisconfiguration_coefficients(coeffs, basis_config)
        for callback in self.callbacks:
            callback(
                basis_config=basis_config,
                current_fit_iteration=self.current_fit_iteration,
                current_fit_cycle=current_fit_cycle,
                current_ladder_step=current_ladder_step,
            )
        self.current_fit_iteration += 1
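
Any callable registered in self.callbacks must accept the keyword arguments passed above. A minimal sketch of such a callback is shown below; it reuses save_interim_potential from Example #1, and the file name pattern is hypothetical.

def my_fit_callback(basis_config, current_fit_iteration, current_fit_cycle, current_ladder_step):
    # illustrative callback: save a snapshot of the current potential after each fit iteration
    save_interim_potential(
        basis_config,
        potential_filename="interim_cycle{}_iter{}.yaml".format(current_fit_cycle, current_fit_iteration))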
Example #3
    def cycle_fitting(self, bbasisconfig: BBasisConfiguration, current_ladder_step: int = 0) -> BBasisConfiguration:
        current_bbasisconfig = bbasisconfig.copy()
        log.info('Cycle fitting loop')

        fit_cycles = int(self.fit_config.get(FIT_FIT_CYCLES_KW, 1))
        noise_rel_sigma = float(self.fit_config.get(FIT_NOISE_REL_SIGMA, 0))
        noise_abs_sigma = float(self.fit_config.get(FIT_NOISE_ABS_SIGMA, 0))

        if "_" + FIT_FIT_CYCLES_KW in current_bbasisconfig.metadata:
            finished_fit_cycles = int(current_bbasisconfig.metadata["_" + FIT_FIT_CYCLES_KW])
        else:
            finished_fit_cycles = 0

        if finished_fit_cycles >= fit_cycles:
            log.warning(
                ("Number of finished fit cycles ({}) >= number of expected fit cycles ({}). " +
                 "Use another potential or remove `{}` from potential metadata")
                    .format(finished_fit_cycles, fit_cycles, "_" + FIT_FIT_CYCLES_KW))
            return current_bbasisconfig

        fitting_attempts_list = []
        while finished_fit_cycles < fit_cycles:
            current_fit_cycle = finished_fit_cycles + 1
            log.info("Number of fit attempts: {}/{}".format(current_fit_cycle, fit_cycles))
            num_of_functions = current_bbasisconfig.total_number_of_functions
            num_of_parameters = len(current_bbasisconfig.get_all_coeffs())
            log.info("Total number of functions: {} / number of parameters: {}".format(num_of_functions,
                                                                                       num_of_parameters))
            log.info("Running fit backend")
            self.current_fit_iteration = 0
            current_bbasisconfig = self.fit_backend.fit(
                current_bbasisconfig,
                dataframe=self.fitting_data, loss_spec=self.loss_spec, fit_config=self.fit_config,
                callback=partial(self.callback_hook, basis_config=bbasisconfig, current_fit_cycle=current_fit_cycle,
                                 current_ladder_step=current_ladder_step)
            )

            log.info("Fitting cycle finished, final statistic:")
            self.fit_backend.print_detailed_metrics(prefix='Last iteration:')

            finished_fit_cycles = current_fit_cycle

            current_bbasisconfig.metadata["_" + FIT_FIT_CYCLES_KW] = str(finished_fit_cycles)
            current_bbasisconfig.metadata["_" + FIT_LOSS_KW] = str(self.fit_backend.res_opt.fun)
            log.debug("Update current_bbasisconfig.metadata = {}".format(current_bbasisconfig.metadata))

            fitting_attempts_list.append((np.sum(self.fit_backend.res_opt.fun), current_bbasisconfig.copy()))

            # select current_bbasisconfig as the best among all previous attempts
            best_ind = np.argmin([v[0] for v in fitting_attempts_list])
            log.info(
                "Select best fit #{} among all available ({})".format(best_ind + 1, len(fitting_attempts_list)))
            current_bbasisconfig = fitting_attempts_list[best_ind][1].copy()

            if finished_fit_cycles < fit_cycles and (noise_rel_sigma > 0 or noise_abs_sigma > 0):
                all_coeffs = current_bbasisconfig.get_all_coeffs()
                noisy_all_coeffs = all_coeffs
                if noise_rel_sigma > 0:
                    log.info(
                        "Applying Gaussian noise with relative sigma/mean = {:>1.4e} to all optimizable coefficients".format(
                            noise_rel_sigma))
                    noisy_all_coeffs = apply_noise(all_coeffs, noise_rel_sigma, relative=True)
                elif noise_abs_sigma > 0:
                    log.info(
                        "Applying Gaussian noise with sigma = {:>1.4e} to all optimizable coefficients".format(
                            noise_abs_sigma))
                    noisy_all_coeffs = apply_noise(all_coeffs, noise_abs_sigma, relative=False)
                current_bbasisconfig.set_all_coeffs(noisy_all_coeffs)

        # choose the best fit attempt among fitting_attempts_list
        best_fitting_attempts_ind = np.argmin([v[0] for v in fitting_attempts_list])
        log.info("Best fitting attempt is #{}".format(best_fitting_attempts_ind + 1))
        current_bbasisconfig = fitting_attempts_list[best_fitting_attempts_ind][1]
        save_interim_potential(current_bbasisconfig)
        return current_bbasisconfig
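
The apply_noise helper used in the loop above is not shown in this example. A minimal sketch consistent with the log messages (Gaussian noise, either relative to each coefficient or absolute) could look like the following; it is an assumption, not the library's actual implementation.

import numpy as np

def apply_noise(coeffs, sigma, relative=True):
    # sketch only: perturb coefficients with Gaussian noise;
    # relative=True scales the perturbation by each coefficient's own value
    coeffs = np.asarray(coeffs, dtype=float)
    noise = np.random.normal(scale=sigma, size=coeffs.shape)
    if relative:
        return (coeffs * (1.0 + noise)).tolist()
    return (coeffs + noise).tolist()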
Example #4
def extend_basis(initial_basis: BBasisConfiguration,
                 final_basis: BBasisConfiguration,
                 ladder_type: str,
                 func_step: int = None) -> BBasisConfiguration:
    if initial_basis.total_number_of_functions == final_basis.total_number_of_functions:
        return initial_basis.copy()
    # grow basis by func_step
    initial_basis_funcs_list = BasisFuncsList(initial_basis)

    final_basis_funcs = []
    for block in final_basis.funcspecs_blocks:
        final_basis_funcs += block.funcspecs

    final_basis_funcs = sort_funcspecs_list(final_basis_funcs, ladder_type)

    new_func_list = []
    existing_func_list = []

    skipped_functions = 0

    for new_func in final_basis_funcs:
        if initial_basis_funcs_list.escribed_area_contains(new_func):
            existing_func = initial_basis_funcs_list.find_existing(new_func)
            if existing_func is not None:
                existing_func_list.append(
                    existing_func)  # copy with existing coefficients
            elif initial_basis_funcs_list.at_ns_area_border(new_func):
                # assume this is a corner case, not a hole
                if initial_basis_funcs_list.is_max_func(new_func):
                    new_func_list.append(new_func)
                elif ladder_type != 'body_order':
                    new_func_list.append(new_func)
                else:
                    skipped_functions += 1  # skip: not a corner case
            # for other ladder growth types the possibility of a hole is not considered
            elif ladder_type != 'body_order':
                new_func_list.append(new_func)
            else:
                skipped_functions += 1  # skip: it is a hole
        else:  # add new, green zone
            new_func_list.append(new_func)

    log.info("Skipped functions number: {}".format(skipped_functions))

    # new_func_list = sort_funcspecs_list(new_func_list, 'std_ranking')

    if func_step is not None and len(new_func_list) > func_step:
        new_func_list = new_func_list[:func_step]

    new_func_list = sort_funcspecs_list(new_func_list, 'body_order')

    new_basis_config = initial_basis.copy()
    # TODO: currently only a single func spec block is assumed
    new_basis_config.funcspecs_blocks[0].funcspecs = sort_funcspecs_list(
        existing_func_list + new_func_list, 'body_order')
    # update nradmax, lmax, nradbase
    new_nradmax = 0
    new_nradbase = 0
    new_lmax = 0
    new_rankmax = 0
    for func in new_basis_config.funcspecs_blocks[0].funcspecs:
        rank = len(func.ns)
        new_rankmax = max(rank, new_rankmax)
        if rank == 1:
            new_nradbase = max(max(func.ns), new_nradbase)
        else:
            new_nradmax = max(max(func.ns), new_nradmax)
        new_lmax = max(max(func.ls), new_lmax)
    new_basis_config.funcspecs_blocks[0].lmaxi = new_lmax
    new_basis_config.funcspecs_blocks[0].nradmaxi = new_nradmax
    new_basis_config.funcspecs_blocks[0].nradbaseij = new_nradbase
    # update crad
    old_crad = np.array(new_basis_config.funcspecs_blocks[0].radcoefficients)
    new_crad = np.zeros((new_nradmax, new_lmax + 1, new_nradbase))
    for n in range(min(new_nradmax, new_nradbase)):
        new_crad[n, :, n] = 1.
    # print("old_crad.shape = ", old_crad.shape)
    # print("new_crad.shape = ", new_crad.shape)
    if old_crad.shape != (0,):
        common_shape = [min(s1, s2) for s1, s2 in zip(old_crad.shape, new_crad.shape)]
        new_crad[:common_shape[0], :common_shape[1], :common_shape[2]] = \
            old_crad[:common_shape[0], :common_shape[1], :common_shape[2]]
    new_basis_config.funcspecs_blocks[0].radcoefficients = new_crad

    # core-repulsion parameters are taken over from final_basis
    new_basis_config.funcspecs_blocks[0].core_rep_parameters = final_basis.funcspecs_blocks[0].core_rep_parameters
    new_basis_config.funcspecs_blocks[0].rho_cut = final_basis.funcspecs_blocks[0].rho_cut
    new_basis_config.funcspecs_blocks[0].drho_cut = final_basis.funcspecs_blocks[0].drho_cut

    return new_basis_config
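
A ladder-growth loop built on top of extend_basis could look like the sketch below. The fitter object and its cycle_fitting method correspond to Example #3; the loop structure, the step size and the variable names are illustrative assumptions.

# grow the basis towards final_basis in steps of func_step functions, refitting after each step
ladder_step = 0
current_basis = initial_basis.copy()
while current_basis.total_number_of_functions < final_basis.total_number_of_functions:
    current_basis = extend_basis(current_basis, final_basis,
                                 ladder_type='body_order', func_step=10)
    current_basis = fitter.cycle_fitting(current_basis, current_ladder_step=ladder_step)
    ladder_step += 1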