Example no. 1
    def save(self, mdict, save_file, discretization=None, globalize=False):
        """
        Save matrices to a ``*.mat`` file for use by ``MATLAB BET`` code and
        :meth:`~bet.basicSampling.loadmat`

        :param dict mdict: dictionary of sampler parameters
        :param string save_file: file name
        :param discretization: input and output from sampling
        :type discretization: :class:`bet.sample.discretization`
        :param bool globalize: Makes local variables global.

        """
        # In a parallel, non-globalized run each processor writes its own
        # "procN_"-prefixed copy so ranks do not clobber one another.
        if globalize or comm.size == 1:
            target_file = save_file
        else:
            base_name = os.path.basename(save_file)
            target_file = os.path.join(os.path.dirname(save_file),
                                       "proc{}_{}".format(comm.rank, base_name))

        # When globalizing, only rank 0 writes; otherwise every rank writes.
        if not globalize or comm.rank == 0:
            sio.savemat(target_file, mdict)
        comm.barrier()

        # Persist the discretization alongside the sampler parameters.
        if discretization is not None:
            sample.save_discretization(discretization, save_file,
                                       globalize=globalize)
Example no. 2
def postprocess(station_nums, ref_num):
    """
    Solve the stochastic inverse problem for the given gauge stations and
    reference datum, then save the resulting discretization.

    :param list station_nums: indices of the stations (columns of ``Q``)
        that form the quantity-of-interest map (length 2 or 3)
    :param int ref_num: row index into ``Q_ref`` of the reference datum
    """
    # Encode the chosen stations and reference in the output file name
    # (1-based in the name for human readability).
    filename = 'P_q' + str(station_nums[0] + 1) + '_q' + str(station_nums[1] + 1)
    if len(station_nums) == 3:
        filename += '_q' + str(station_nums[2] + 1)
    filename += '_ref_' + str(ref_num + 1)

    data = Q[:, station_nums]
    output_sample_set = sample.sample_set(data.shape[1])
    output_sample_set.set_values(data)
    q_ref = Q_ref[ref_num, station_nums]

    # Create simple function approximation.
    # Save points used to partition D for the simple function approximation
    # and the approximation itself (this can be used to make close
    # comparisons later).
    output_probability_set = sfun.regular_partition_uniform_distribution_rectangle_scaled(
        output_sample_set, q_ref, rect_scale=0.15,
        cells_per_dimension=np.ones((data.shape[1],)))

    my_disc = sample.discretization(input_sample_set, output_sample_set,
                                    output_probability_set)

    # Calculate P on the actual samples with the assumption that Voronoi
    # cells have equal size.
    input_sample_set.estimate_volume_mc()
    # print() with a single parenthesized argument is valid Python 2 and 3;
    # the original bare print statement is a SyntaxError under Python 3.
    print("Calculating prob")
    calcP.prob(my_disc)
    sample.save_discretization(my_disc, filename, "prob_solution")
Example no. 3
def postprocess(station_nums, ref_num):
    """
    Invert the map defined by the selected stations against the reference
    datum and save the computed probability discretization to disk.

    :param list station_nums: station indices (columns of ``Q``) defining
        the quantity-of-interest map (length 2 or 3)
    :param int ref_num: row index into ``Q_ref`` selecting the reference
    """
    # File name records which stations and reference were used (1-based).
    filename = 'P_q' + str(station_nums[0] + 1) + '_q' + str(station_nums[1] +
                                                             1)
    if len(station_nums) == 3:
        filename += '_q' + str(station_nums[2] + 1)
    filename += '_ref_' + str(ref_num + 1)

    data = Q[:, station_nums]
    output_sample_set = sample.sample_set(data.shape[1])
    output_sample_set.set_values(data)
    q_ref = Q_ref[ref_num, station_nums]

    # Create simple function approximation.
    # Save points used to partition D for the simple function approximation
    # and the approximation itself (this can be used to make close
    # comparisons later).
    output_probability_set = sfun.regular_partition_uniform_distribution_rectangle_scaled(
        output_sample_set, q_ref, rect_scale=0.15,
        cells_per_dimension=np.ones((data.shape[1],)))

    my_disc = sample.discretization(input_sample_set, output_sample_set,
                                    output_probability_set)

    # Calculate P on the actual samples with the assumption that Voronoi
    # cells have equal size.
    input_sample_set.estimate_volume_mc()
    # Fixed: the bare Python 2 print statement fails to parse on Python 3;
    # a single parenthesized argument is valid on both interpreters.
    print("Calculating prob")
    calcP.prob(my_disc)
    sample.save_discretization(my_disc, filename, "prob_solution")
Example no. 4
    def save(self, mdict, save_file, discretization=None, globalize=False):
        """
        Save matrices to a ``*.mat`` file for use by ``MATLAB BET`` code and
        :meth:`~bet.basicSampling.loadmat`

        :param dict mdict: dictionary of sampler parameters
        :param string save_file: file name
        :param discretization: input and output from sampling
        :type discretization: :class:`bet.sample.discretization`
        :param bool globalize: Makes local variables global. 

        """

        # In a parallel, non-globalized run each processor writes its own
        # "procN_"-prefixed copy of the file so ranks do not overwrite each
        # other's local data.
        if comm.size > 1 and not globalize:
            local_save_file = os.path.join(os.path.dirname(save_file),
                                           "proc{}_{}".format(comm.rank, os.path.basename(save_file)))
        else:
            local_save_file = save_file

        # When globalizing, only rank 0 writes the single shared file;
        # otherwise every rank writes its own file.
        if (globalize and comm.rank == 0) or not globalize:
            sio.savemat(local_save_file, mdict)
        comm.barrier()

        # Persist the discretization alongside the sampler parameters,
        # honoring the same globalize setting.
        if discretization is not None:
            sample.save_discretization(discretization, save_file,
                                       globalize=globalize)
Example no. 5
def postprocess(station_nums, ref_num):
    """
    Solve the stochastic inverse problem for the selected stations and
    reference datum three ways (on emulated samples, on the actual samples
    with Monte Carlo volumes, and with emulated volumes) and save each
    resulting discretization.

    :param list station_nums: station indices (columns of ``Q``) forming
        the quantity-of-interest map (length 2 or 3)
    :param int ref_num: row index into ``Q_ref`` of the reference datum
    """
    # File name records which stations and reference were used (1-based).
    filename = 'P_q' + str(station_nums[0] + 1) + \
        '_q' + str(station_nums[1] + 1)
    if len(station_nums) == 3:
        filename += '_q' + str(station_nums[2] + 1)
    filename += '_ref_' + str(ref_num + 1)

    data = Q[:, station_nums]
    output_sample_set = sample.sample_set(data.shape[1])
    output_sample_set.set_values(data)
    q_ref = Q_ref[ref_num, station_nums]

    # Create simple function approximation.
    # Save points used to partition D for the simple function approximation
    # and the approximation itself (this can be used to make close
    # comparisons later).
    output_probability_set = sfun.regular_partition_uniform_distribution_rectangle_scaled(
        output_sample_set,
        q_ref,
        rect_scale=0.15,
        cells_per_dimension=np.ones((data.shape[1], )))

    # Sample counts must be integral: 1e4 is a float, and modern NumPy
    # rejects non-integer sizes, so convert explicitly.
    num_l_emulate = int(1e4)
    set_emulated = bsam.random_sample_set('r', lam_domain, num_l_emulate)
    my_disc = sample.discretization(input_sample_set,
                                    output_sample_set,
                                    output_probability_set,
                                    emulated_input_sample_set=set_emulated)

    print("Finished emulating lambda samples")

    # Calculate P on lambda emulate
    print("Calculating prob_on_emulated_samples")
    calcP.prob_on_emulated_samples(my_disc)
    sample.save_discretization(my_disc, filename,
                               "prob_on_emulated_samples_solution")

    # Calculate P on the actual samples with the assumption that Voronoi
    # cells have equal size.
    input_sample_set.estimate_volume_mc()
    print("Calculating prob")
    calcP.prob(my_disc)
    sample.save_discretization(my_disc, filename, "prob_solution")

    # Calculate P on the actual samples estimating Voronoi cell volume with
    # MC integration. (Announce before running, consistent with the two
    # sections above -- the original printed after the computation.)
    print("Calculating prob_with_emulated_volumes")
    calcP.prob_with_emulated_volumes(my_disc)
    sample.save_discretization(my_disc, filename,
                               "prob_with_emulated_volumes_solution")
Example no. 6
disc_name_base = "disc_prob"
# NOTE(review): the loop runs exactly once and immediately overwrites its
# index with 5 -- presumably a study over several scales reduced to one
# case; preserved as-is. TODO confirm intent.
for scale in range(1):
    scale = 5
    fact = 2**scale
    fname = file_name_base + "_p05d" + str(fact)
    fname2 = disc_name_base + "_p05d" + str(fact)

    # Build the simple-function approximation of the output probability.
    simpleFunP.regular_partition_uniform_distribution_rectangle_scaled(
        data_set=my_discretization,
        Q_ref=Q_ref,
        rect_scale=0.05 / fact,
        center_pts_per_edge=1)

    # calculate the induced probability
    calculateP.prob(my_discretization)
    samp.save_discretization(my_discretization, fname)
    P = input_samples.get_probabilities()

    case = []
    discrete_prob = np.zeros((np.size(var), np.size(eta)))
    '''
     discrete_prob[i][j][k] = prob of i^th variance, j^th correlation length
    '''
    # NOTE(review): i and j are initialized but never advanced in the
    # visible code; the loop below appears truncated. Kept verbatim.
    i = 0
    j = 0
    for variance in var:
        j = 0
        for corr in eta:
            case.append(variance)
            case.append(corr)
            # check which case number this corresponds to
output_samples = samp.sample_set(2)
output_samples.set_values(data)

disc_name_base = "disc_prob_simpleFunc"
# create a discretization object
my_discretization = samp.discretization(input_sample_set=input_samples,
                                        output_sample_set=output_samples)
fact = 4
simpleFunP.regular_partition_uniform_distribution_rectangle_scaled(
    data_set=my_discretization, Q_ref=Q_ref, rect_scale=0.2 / fact,
    center_pts_per_edge=1)

# calculate the induced probability
calculateP.prob(my_discretization)
samp.save_discretization(my_discretization, fname)
P = input_samples.get_probabilities()

case = []
discrete_prob = np.zeros(3)
# Python 2 print statements converted to print() calls so the script parses
# under Python 3 (consistent with the py3 examples elsewhere in this file).
print(np.shape(P))

print(n_samples)
stride = 0
# Sum the probability mass attributed to each of the three meso-scale
# blocks of n_samples consecutive samples.
for meso in range(3):
    print(stride, stride + n_samples)
    print(sum(P[meso * n_samples:(meso + 1) * n_samples, ]))
    discrete_prob[meso] = sum(P[meso * n_samples:(meso + 1) * n_samples, ])
    stride += n_samples

fname2 = disc_name_base + "_p01d" + str(fact)