Example #1
def calibration_loop(overflow, job_folder, c_array):

    start = time()

    for i, c in enumerate(c_array):
        logging.info('{} {}'.format(i, c))
        result, cp, u, uv, u_surface = work_function(overflow, c, i)

        # Append this iteration's results; the context managers close all five
        # files even if a later iteration fails.
        with open(os.path.join(job_folder, 'result.dat'), 'a+') as result_file, \
                open(os.path.join(job_folder, 'cp_all.bin'), 'ab') as cp_file, \
                open(os.path.join(job_folder, 'u_slice.bin'), 'ab') as u_file, \
                open(os.path.join(job_folder, 'uv_slice.bin'), 'ab') as uv_file, \
                open(os.path.join(job_folder, 'u_surface.bin'), 'ab') as u_surface_file:
            result_file.write('{}\n'.format(result))
            cp_file.write(bytearray(cp))
            u_file.write(bytearray(u))
            uv_file.write(bytearray(uv))
            u_surface_file.write(bytearray(u_surface))

    end = time()
    timer(start, end, 'Time ')
    return
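The loop above appends each iteration's arrays as raw bytes to the .bin files. A minimal sketch of how such append-only files could be read back with NumPy, assuming every record is a flat float64 array of known length (both the dtype and the record length are assumptions, since work_function is not shown):

import numpy as np

def read_binary_records(path, record_len, dtype=np.float64):
    # Read all appended records at once and split them back into one row per
    # calibration point; dtype and record_len are assumptions, not taken from
    # the source of work_function.
    flat = np.fromfile(path, dtype=dtype)
    n_records = flat.size // record_len
    return flat[:n_records * record_len].reshape(n_records, record_len)

# Example: cp_all = read_binary_records('cp_all.bin', record_len=256)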
Example #2
def abc_classic(C_array):

    N_params = len(C_array[0])
    N = len(C_array)
    logging.info(
        f'Classic ABC algorithm: Number of parameters: {N_params}, Number of samples: {N}'
    )
    logging.info(f'Working function: {g.work_function.__name__}')
    start = time()
    g.par_process.run(func=g.work_function, tasks=C_array)
    result = g.par_process.get_results()
    end = time()
    utils.timer(start, end, 'Time ')
    c = np.array([C[:N_params] for C in result])
    sumstat = np.array([C[N_params:-1] for C in result])
    dist = np.array([C[-1] for C in result])
    n, r, size = utils.check_output_size(N, N_params, len(sumstat[0]))
    for i in range(n):
        np.savez(os.path.join(g.path['output'], 'classic_abc{}.npz'.format(i)),
                 C=c[i * size:(i + 1) * size],
                 sumstat=sumstat[i * size:(i + 1) * size],
                 dist=dist[i * size:(i + 1) * size])
    if r:
        np.savez(os.path.join(g.path['output'], 'classic_abc{}.npz'.format(n)),
                 C=c[n * size:],
                 sumstat=sumstat[n * size:],
                 dist=dist[n * size:])
    return
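Each result row is apparently the concatenation [C, summary statistics, distance], which the slicing above unpacks before writing the chunked .npz files. A toy sketch of that unpacking and chunked saving with plain NumPy (the row layout and chunk size are assumptions inferred from the slicing, not taken from work_function or check_output_size):

import numpy as np

# Toy data standing in for g.par_process.get_results(): 10 rows of
# [2 parameters, 3 summary statistics, 1 distance] (layout inferred above).
N_params, N = 2, 10
result = np.random.rand(N, N_params + 3 + 1)

c = result[:, :N_params]          # sampled parameters
sumstat = result[:, N_params:-1]  # summary statistics
dist = result[:, -1]              # distance to the reference statistics

# Save in chunks of at most `size` samples, analogous to check_output_size.
size = 4
for i, row in enumerate(range(0, N, size)):
    np.savez('classic_abc{}.npz'.format(i),
             C=c[row:row + size],
             sumstat=sumstat[row:row + size],
             dist=dist[row:row + size])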
Example #3
    def filter3d_dict(data, scale_k, dx, filename=None):
        """ Tophat (low-pass) filtering for dictionary of 3D arrays
            (performed as multiplication in Fourier space)
        :param data: dict of 3d np.arrays
        :param scale_k: wave number, which define size of filter
        :param dx: distance between data points in physical space
        :param filename: filename if need to save filtered result in .npz file
        :return: dict of filtered arrays
        """
        start = time()
        N_points = next(
            iter(data.values())
        ).shape  # shape of any array in the dict (all arrays are assumed to have the same shape)
        k = [
            fftfreq(N_points[0], dx[0]),
            fftfreq(N_points[1], dx[1]),
            fftfreq(N_points[2], dx[2])
        ]
        kernel = DataFiltered.tophat_kernel_3d(k,
                                               scale_k)  # Create filter kernel

        result = dict()
        for key, value in data.items():
            result[key] = DataFiltered.filter3d_array(value, kernel)
        end = time()
        timer(start, end, 'Time for data filtering')

        if filename:
            file = os.path.join('./data', filename + '.npz')
            logging.info('\nWrite file in ' + file)
            np.savez(file, **result)

        return result
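Since tophat_kernel_3d and filter3d_array are not shown, here is a self-contained sketch of the same idea for a single 3D array: build a low-pass kernel in wave-number space and multiply it with the field's Fourier transform. The sharp spherical cutoff is an assumption about what the tophat kernel looks like.

import numpy as np
from numpy.fft import fftn, ifftn, fftfreq

def lowpass_filter_3d(field, scale_k, dx):
    # Wave numbers along each axis, then a sharp spherical cutoff at scale_k
    # (my assumption for the tophat kernel); filtering is a multiplication
    # in Fourier space followed by an inverse transform.
    n = field.shape
    kx, ky, kz = np.meshgrid(fftfreq(n[0], dx[0]), fftfreq(n[1], dx[1]),
                             fftfreq(n[2], dx[2]), indexing='ij')
    kernel = (np.sqrt(kx**2 + ky**2 + kz**2) <= scale_k).astype(float)
    return np.real(ifftn(fftn(field) * kernel))

# Example: filtered = lowpass_filter_3d(np.random.rand(32, 32, 32), scale_k=0.2, dx=[1.0, 1.0, 1.0])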
Example #4
def kdepy_fftkde(data, a, b, num_bin_joint):
    """ Calculate Kernel Density Estimation (KDE) using KDEpy.FFTKDE.
    Note: KDEpy.FFTKDE can do only symmetric kernel (accept only scalar bandwidth).
    We map data to [-1, 1] domain to make bandwidth independent of parameter range and more symmetric
    and use mean of list bandwidths (different bandwidth for each dimension)
    calculated usinf Scott's rule and scipy.stats.gaussian_kde
    :param data: array of parameter samples
    :param a: list of left boundaries
    :param b: list of right boundaries
    :param num_bin_joint: number of bins (cells) per dimension in estimated posterior
    :return: estimated posterior of shape (num_bin_joint, )*dimensions
    """

    N_params = len(data[0])
    logging.info('KDEpy.FFTKDE: Gaussian KDE {} dimensions'.format(N_params))
    time1 = time()
    a = np.array(a)-1e-10
    b = np.array(b)+1e-10
    data = 2 * (data - a) / (b - a) - 1     # map data to [-1, 1] so a single isotropic Gaussian bandwidth is appropriate
    bandwidth = bw_from_kdescipy(data, 'scott')
    _, grid_ravel = grid_for_kde(-1*np.ones(N_params), np.ones(N_params), num_bin_joint)
    kde = FFTKDE(kernel='gaussian', bw=np.mean(bandwidth))
    kde.fit(data)
    Z = kde.evaluate(grid_ravel.T)
    Z = Z.reshape((num_bin_joint + 1, )*N_params)
    time2 = time()
    timer(time1, time2, "Time for kdepy_fftkde")
    return Z
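A toy sketch of the same recipe on 2D samples, without the project's helpers (grid_for_kde, bw_from_kdescipy). The per-dimension Scott bandwidths below are my reconstruction of what bw_from_kdescipy returns, and the evaluation grid is left to FFTKDE's own equidistant grid instead of grid_for_kde:

import numpy as np
from scipy.stats import gaussian_kde
from KDEpy import FFTKDE

data = np.random.rand(1000, 2)
a = data.min(axis=0) - 1e-10
b = data.max(axis=0) + 1e-10
data_mapped = 2 * (data - a) / (b - a) - 1          # map samples to [-1, 1]

# Per-dimension Scott bandwidths (assumed equivalent to bw_from_kdescipy):
# scipy's Scott factor times the per-dimension standard deviation.
scott_factor = gaussian_kde(data_mapped.T).covariance_factor()
bandwidths = scott_factor * data_mapped.std(axis=0, ddof=1)

# FFTKDE accepts only a scalar bandwidth, so use the mean; let it build its
# own equidistant grid with 64 points per dimension.
grid, pdf = FFTKDE(kernel='gaussian', bw=np.mean(bandwidths)).fit(data_mapped).evaluate(64)
Z = pdf.reshape(64, 64)                              # posterior on the 64 x 64 grid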
Example #5
def mcmc_chains(n_chains):

    start = time()
    g.par_process.run(func=one_chain, tasks=np.arange(n_chains))
    end = time()
    utils.timer(start, end, 'Time for running chains')

    # result = g.par_process.get_results()
    # accepted = np.array([chunk[:N_params] for item in result for chunk in item])
    # sumstat = np.array([chunk[N_params:-1] for item in result for chunk in item])
    # dist = np.array([chunk[-1] for item in result for chunk in item])
    return
Example #6
def calibration_loop(sampling_type, C_limits, N_calibration):

    logging.info('Sampling: {}'.format(sampling_type))
    C_array = sampling(sampling_type, C_limits, N_calibration)
    logging.info('Calibration step 1')
    start_calibration = time()
    g.par_process.run(func=g.work_function, tasks=C_array)
    S_init = g.par_process.get_results()
    end_calibration = time()
    utils.timer(start_calibration, end_calibration,
                'Time of calibration step 1')
    logging.debug(
        f'After Calibration: Number of inf = {np.sum(np.isinf(np.array(S_init)[:, -1]))}'
    )
    return S_init
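The sampling helper itself is not shown; below is a minimal stand-in, assuming the simplest case of uniform random sampling inside the box defined by C_limits (the real function presumably supports several sampling_type values):

import numpy as np

def uniform_sampling(C_limits, N_calibration):
    # C_limits is assumed to have shape (N_params, 2) with rows [min, max];
    # draw N_calibration uniform samples inside that box.
    C_limits = np.asarray(C_limits, dtype=float)
    low, high = C_limits[:, 0], C_limits[:, 1]
    return low + (high - low) * np.random.rand(N_calibration, len(C_limits))

# Example: C_array = uniform_sampling([[0.0, 0.3], [0.05, 0.25]], N_calibration=100)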
Example #7
    def run_overflow(self):
        exe = os.path.join(self.exe_path, 'overflowmpi')
        outfile = os.path.join(self.job_folder, 'over.out')
        # Run overflow
        time_start = time()
        args = ['mpiexec', '-np', str(self.MPI_NP), '-d', self.job_folder, exe]
        # logging.info(args)
        with open(outfile, 'wb', 8) as f:
            sp.Popen(args,
                     cwd=self.job_folder,
                     env=self.env,
                     stdout=f,
                     stderr=f).wait()
        time_end = time()
        timer(time_start, time_end, 'Overflow time')
        # Treat a run that finished in under 100 seconds as unsuccessful.
        if time_end - time_start < 100:
            return False
        return True
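The same pattern stripped of the OVERFLOW-specific pieces: launch an MPI executable with subprocess, redirect its stdout and stderr into a log file, and treat a run that finishes too quickly as failed. The paths, process count, and 100 s threshold below are illustrative only.

import os
import subprocess
from time import time

def run_mpi_job(exe, workdir, n_procs, min_runtime=100):
    # Launch the solver, send both stdout and stderr to a log file and wait.
    args = ['mpiexec', '-np', str(n_procs), exe]
    t_start = time()
    with open(os.path.join(workdir, 'job.out'), 'wb') as f:
        subprocess.run(args, cwd=workdir, stdout=f, stderr=f)
    # A run shorter than min_runtime seconds is assumed to have failed.
    return time() - t_start >= min_runtime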
Example #8
def gaussian_kde_scipy(data, a, b, num_bin_joint, bw=None, weights=None):

    if hasattr(a, "__len__"):
        n_params = len(a)
    else:
        n_params = 1
        a, b = [a], [b]
    logging.info('Scipy: Gaussian KDE {} dimensions with {} bins per dimension'.format(n_params, num_bin_joint))
    if bw:
        kde = gaussian_kde(data.T, bw_method=bw, weights=weights)
    else:
        kde = gaussian_kde(data.T, bw_method='scott', weights=weights)
    # evaluate on a regular grid
    grid_mesh, grid_ravel = grid_for_kde(a, b, num_bin_joint)
    time1 = time()
    logging.info('kde factor: {}'.format(kde.covariance_factor()))
    Z = kde.evaluate(grid_ravel)
    Z = Z.reshape(grid_mesh[0].shape)
    time2 = time()
    timer(time1, time2, "Time for gaussian_kde_scipy")

    return Z
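A toy usage of the same scipy-based estimator on 2D samples; the grid construction below mimics what grid_for_kde presumably returns (a meshgrid plus the stacked, ravelled coordinates), which is an assumption since that helper is not shown.

import numpy as np
from scipy.stats import gaussian_kde

data = np.random.rand(500, 2)                  # toy parameter samples
a, b, num_bin_joint = [0.0, 0.0], [1.0, 1.0], 20

# Regular grid over [a, b] with num_bin_joint bins (num_bin_joint + 1 points) per dimension.
axes = [np.linspace(a[i], b[i], num_bin_joint + 1) for i in range(len(a))]
grid_mesh = np.meshgrid(*axes, indexing='ij')
grid_ravel = np.vstack([m.ravel() for m in grid_mesh])   # shape (n_dims, n_points)

kde = gaussian_kde(data.T, bw_method='scott')
Z = kde.evaluate(grid_ravel).reshape(grid_mesh[0].shape)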