Example #1
def postprocess(station_nums, ref_num):
    
    filename = 'P_q'+str(station_nums[0]+1)+'_q'+str(station_nums[1]+1)
    if len(station_nums) == 3:
        filename += '_q'+str(station_nums[2]+1)
    filename += '_ref_'+str(ref_num+1)

    data = Q[:, station_nums]
    q_ref = Q_ref[ref_num, station_nums]

    # Create Simple function approximation
    # Save points used to partition D for simple function approximation and the
    # approximation itself (this can be used to make close comparisons...)
    (rho_D_M, d_distr_samples, d_Tree) = sfun.uniform_hyperrectangle(data,
            q_ref, bin_ratio=0.15,
            center_pts_per_edge=np.ones((data.shape[1],)))
    mdict = dict()
    mdict['rho_D_M'] = rho_D_M
    mdict['d_distr_samples'] = d_distr_samples

    # Calculate P on the actual samples with the assumption that voronoi
    # cells have equal size
    (P1, lam_vol1, io_ptr1) = calcP.prob(samples, data, rho_D_M,
            d_distr_samples, d_Tree)
    print "Calculating prob"
    mdict['P1'] = P1
    mdict['lam_vol1'] = lam_vol1
    mdict['lem1'] = samples
    mdict['io_ptr1'] = io_ptr1

    # Export P and compare to MATLAB solution visually
    sio.savemat(filename, mdict, do_compression=True)
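As a usage sketch (the station indices and reference number below are hypothetical, not taken from the snippet), a call such as

    postprocess([0, 5], 6)

would build the filename 'P_q1_q6_ref_7' and write 'P_q1_q6_ref_7.mat' via sio.savemat.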
Example #2
def postprocess(station_nums, ref_num):
    
    filename = 'P_q'+str(station_nums[0]+1)+'_q'+str(station_nums[1]+1)
    if len(station_nums) == 3:
        filename += '_q'+str(station_nums[2]+1)
    filename += '_ref_'+str(ref_num+1)

    data = Q[:, station_nums]
    q_ref = Q_ref[ref_num, station_nums]

    # Create Simple function approximation
    # Save points used to partition D for simple function approximation and the
    # approximation itself (this can be used to make close comparisons...)
    (rho_D_M, d_distr_samples, d_Tree) = sfun.uniform_hyperrectangle(data,
            q_ref, bin_ratio=0.15,
            center_pts_per_edge=np.ones((data.shape[1],)))

    num_l_emulate = 1e6
    lambda_emulate = calcP.emulate_iid_lebesgue(lam_domain, num_l_emulate)
    
    if comm.rank == 0:
        print "Finished emulating lambda samples"
        mdict = dict()
        mdict['rho_D_M'] = rho_D_M
        mdict['d_distr_samples'] = d_distr_samples 
        mdict['num_l_emulate'] = num_l_emulate

    # Calculate P on lambda emulate
    (P0, lem0, io_ptr0, emulate_ptr0) = calcP.prob_emulated(samples, data,
            rho_D_M, d_distr_samples, lambda_emulate, d_Tree)
    if comm.rank == 0:
        print "Calculating prob_emulated"
        mdict['P0'] = P0
        mdict['lem0'] = lem0
        mdict['io_ptr0'] = io_ptr0
        mdict['emulate_ptr0'] = emulate_ptr0

    # Calculate P on the actual samples with the assumption that voronoi
    # cells have equal size
    (P1, lam_vol1, io_ptr1) = calcP.prob(samples, data,
            rho_D_M, d_distr_samples, d_Tree)
    if comm.rank == 0:
        print "Calculating prob"
        mdict['P1'] = P1
        mdict['lam_vol1'] = lam_vol1
        mdict['lem1'] = samples
        mdict['io_ptr1'] = io_ptr1

    # Calculate P on the actual samples estimating voronoi cell volume with MC
    # integration
    (P3, lam_vol3, lambda_emulate3, io_ptr3, emulate_ptr3) = calcP.prob_mc(samples,
            data, rho_D_M, d_distr_samples, lambda_emulate, d_Tree)
    if comm.rank == 0:
        print "Calculating prob_mc"
        mdict['P3'] = P3
        mdict['lam_vol3'] = lam_vol3
        mdict['io_ptr3'] = io_ptr3
        mdict['emulate_ptr3'] = emulate_ptr3
        # Export P
        sio.savemat(filename, mdict, do_compression=True)
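This variant prints and saves only on rank 0, so it assumes an MPI communicator named comm is in scope. A minimal sketch of that setup (an assumption about the imports, which the snippet does not show):

    from bet.Comm import comm       # BET's communicator wrapper
    # or, directly with mpi4py:
    # from mpi4py import MPI
    # comm = MPI.COMM_WORLD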
Example #3
def postprocess(station_nums, ref_num):
    
    filename = 'P_q'+str(station_nums[0]+1)+'_q'+str(station_nums[1]+1)
    if len(station_nums) == 3:
        filename += '_q'+str(station_nums[2]+1)
    filename += '_ref_'+str(ref_num+1)

    data = Q[:, station_nums]
    q_ref = Q_ref[ref_num, station_nums]

    # Create Simple function approximation
    # Save points used to partition D for simple function approximation and the
    # approximation itself (this can be used to make close comparisons...)
    (rho_D_M, d_distr_samples, d_Tree) = sfun.uniform_hyperrectangle(data,
            q_ref, bin_ratio=0.15,
            center_pts_per_edge=np.ones((data.shape[1],)))

    num_l_emulate = 1e6
    lambda_emulate = calcP.emulate_iid_lebesgue(lam_domain, num_l_emulate)
    print "Finished emulating lambda samples"

    mdict = dict()
    mdict['rho_D_M'] = rho_D_M
    mdict['d_distr_samples'] = d_distr_samples 
    mdict['num_l_emulate'] = num_l_emulate
    mdict['lambda_emulate'] = lambda_emulate

    # Calculate P on lambda emulate
    (P0, lem0, io_ptr0, emulate_ptr0) = calcP.prob_emulated(samples, data,
            rho_D_M, d_distr_samples, lambda_emulate, d_Tree)
    print "Calculating prob_emulated"
    mdict['P0'] = P0
    mdict['lem0'] = lem0
    mdict['io_ptr0'] = io_ptr0
    mdict['emulate_ptr0'] = emulate_ptr0

    # Calculate P on the actual samples with the assumption that voronoi
    # cells have equal size
    (P1, lam_vol1, io_ptr1) = calcP.prob(samples, data,
            rho_D_M, d_distr_samples, d_Tree)
    print "Calculating prob"
    mdict['P1'] = P1
    mdict['lam_vol1'] = lam_vol1
    mdict['lem1'] = samples
    mdict['io_ptr1'] = io_ptr1

    # Calculate P on the actual samples estimating voronoi cell volume with MC
    # integration
    (P3, lam_vol3, lambda_emulate3, io_ptr3, emulate_ptr3) = calcP.prob_mc(samples,
            data, rho_D_M, d_distr_samples, lambda_emulate, d_Tree)
    print "Calculating prob_mc"
    mdict['P3'] = P3
    mdict['lam_vol3'] = lam_vol3
    mdict['io_ptr3'] = io_ptr3
    mdict['emulate_ptr3'] = emulate_ptr3
    # Export P
    sio.savemat(filename, mdict, do_compression=True)
Example #4
 def setUp(self):
     self.samples = np.loadtxt(data_path + "/3to2_samples.txt.gz")
     self.data = np.loadtxt(data_path + "/3to2_data.txt.gz")
     Q_ref =  np.array([0.422, 0.9385])
     (self.d_distr_prob, self.d_distr_samples, self.d_Tree) = simpleFunP.uniform_hyperrectangle(data=self.data,Q_ref=Q_ref, bin_ratio=0.2, center_pts_per_edge = 1)
     self.lam_domain= np.array([[0.0, 1.0],
                                [0.0, 1.0],
                                [0.0, 1.0]])
     import numpy.random as rnd
     rnd.seed(1)
     self.lambda_emulate = calcP.emulate_iid_lebesgue(lam_domain=self.lam_domain, 
                                                      num_l_emulate = 1001)
Example #5
 def setUp(self):
     self.samples = np.loadtxt(data_path + "/3to2_samples.txt.gz")
     self.data = np.loadtxt(data_path + "/3to2_data.txt.gz")
     Q_ref = np.array([0.422, 0.9385])
     (self.d_distr_prob, self.d_distr_samples,
      self.d_Tree) = simpleFunP.uniform_hyperrectangle(
          data=self.data, Q_ref=Q_ref, bin_ratio=0.2, center_pts_per_edge=1)
     self.lam_domain = np.array([[0.0, 1.0], [0.0, 1.0], [0.0, 1.0]])
     import numpy.random as rnd
     rnd.seed(1)
     self.lambda_emulate = calcP.emulate_iid_lebesgue(
         lam_domain=self.lam_domain, num_l_emulate=1001)
Example #6
 def setUp(self):
     """
     Set up problem.
     """
     import numpy.random as rnd
     rnd.seed(1)
     self.lam_domain=np.zeros((1,2))
     self.lam_domain[0,0]=0.0
     self.lam_domain[0,1]=1.0
     self.num_l_emulate = 1001
     self.lambda_emulate = calcP.emulate_iid_lebesgue(self.lam_domain, self.num_l_emulate)
     self.samples =  rnd.rand(100,)
     self.data = 2.0*self.samples
     Q_ref =  np.mean(self.data, axis=0)
     (self.d_distr_prob, self.d_distr_samples, self.d_Tree) = simpleFunP.uniform_hyperrectangle(data=self.data,Q_ref=Q_ref, bin_ratio=0.2, center_pts_per_edge = 1)
Example #7
 def setUp(self):
     """
     Set up problem.
     """
     import numpy.random as rnd
     rnd.seed(1)
     self.lam_domain = np.zeros((1, 2))
     self.lam_domain[0, 0] = 0.0
     self.lam_domain[0, 1] = 1.0
     self.num_l_emulate = 1001
     self.lambda_emulate = calcP.emulate_iid_lebesgue(
         self.lam_domain, self.num_l_emulate)
     self.samples = rnd.rand(100, )
     self.data = 2.0 * self.samples
     Q_ref = np.mean(self.data, axis=0)
     (self.d_distr_prob, self.d_distr_samples,
      self.d_Tree) = simpleFunP.uniform_hyperrectangle(
          data=self.data, Q_ref=Q_ref, bin_ratio=0.2, center_pts_per_edge=1)
Example #8
def postprocess(station_nums, ref_num):
    
    filename = 'P_q'+str(station_nums[0]+1)+'_q'+str(station_nums[1]+1)
    if len(station_nums) == 3:
        filename += '_q'+str(station_nums[2]+1)
    filename += '_truth_'+str(ref_num+1)

    data = Q[:, station_nums]
    q_ref = Q_ref[ref_num, station_nums]

    # Create Simple function approximation
    # Save points used to partition D for simple function approximation and the
    # approximation itself (this can be used to make close comparisons...)
    (rho_D_M, d_distr_samples, d_Tree) = sfun.uniform_hyperrectangle(data,
            q_ref, bin_ratio=0.15,
            center_pts_per_edge=np.ones((data.shape[1],)))

    num_l_emulate = 1e6
    lambda_emulate = calcP.emulate_iid_lebesgue(lam_domain, num_l_emulate)
    print "Finished emulating lambda samples"

    # Calculate P on the actual samples estimating voronoi cell volume with MC
    # integration
    (P3, lam_vol3, lambda_emulate3, io_ptr3, emulate_ptr3) = calcP.prob_mc(samples,
            data, rho_D_M, d_distr_samples, lam_domain, lambda_emulate, d_Tree)
    print "Calculating prob_mc"
    mdict = dict()
    mdict['rho_D_M'] = rho_D_M
    mdict['d_distr_samples'] = d_distr_samples 
    mdict['lambda_emulate'] = util.get_global_values(lambda_emulate)   
    mdict['num_l_emulate'] = mdict['lambda_emulate'].shape[1]
    mdict['P3'] = util.get_global_values(P3)
    mdict['lam_vol3'] = util.get_global_values(lam_vol3)
    mdict['io_ptr3'] = util.get_global_values(io_ptr3)
    mdict['emulate_ptr3'] = emulate_ptr3
        
    if rank == 0:
        # Export P and compare to MATLAB solution visually
        sio.savemat(filename, mdict, do_compression=True)
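Here each MPI rank holds only its slice of the emulated samples and results, so util.get_global_values gathers them before rank 0 writes the file. A rough, self-contained sketch of that kind of gather (an assumption about what the helper does, written directly with mpi4py and numpy):

    import numpy as np
    from mpi4py import MPI

    def gather_global(local_vals, comm=MPI.COMM_WORLD):
        # Every rank contributes its local array; the concatenated result,
        # in rank order, is returned on all processes.
        return np.concatenate(comm.allgather(local_vals))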
Example #9
    def setUp(self):
        """
        Set up problem.
        """
        super(uniform_hyperrectangle_ratio_list, self).setUp()
        if not isinstance(self.Q_ref, np.ndarray):
            Q_ref = np.array([self.Q_ref])
        else:
            Q_ref = self.Q_ref
        if len(self.data_domain.shape) == 1:
            data_domain = np.expand_dims(self.data_domain, axis=0)
        else:
            data_domain = self.data_domain

        self.rect_domain = np.zeros((data_domain.shape[0], 2))
        binratio = 0.1*np.ones((data_domain.shape[0],))
        r_width = binratio*data_domain[:,1]

        self.rect_domain[:, 0] = Q_ref - .5*r_width
        self.rect_domain[:, 1] = Q_ref + .5*r_width

        self.rho_D_M, self.d_distr_samples, self.d_Tree = sFun.uniform_hyperrectangle(self.data, 
                self.Q_ref, binratio, self.center_pts_per_edge)
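As a concrete check of the rectangle construction above (numbers hypothetical): with a single data dimension, Q_ref = 1.0, data_domain row [0.0, 2.0], and binratio = 0.1, the half-width is 0.5 * (0.1 * 2.0) = 0.1, so rect_domain becomes [0.9, 1.1]:

    import numpy as np

    Q_ref = np.array([1.0])
    data_domain = np.array([[0.0, 2.0]])
    binratio = 0.1 * np.ones((data_domain.shape[0],))
    r_width = binratio * data_domain[:, 1]              # [0.2]
    rect_domain = np.column_stack((Q_ref - 0.5 * r_width,
                                   Q_ref + 0.5 * r_width))
    print(rect_domain)                                  # [[0.9 1.1]]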
Example #10
def postprocess(station_nums, ref_num):

    filename = 'P_q' + str(station_nums[0] + 1) + '_q' + str(station_nums[1] +
                                                             1)
    if len(station_nums) == 3:
        filename += '_q' + str(station_nums[2] + 1)
    filename += '_ref_' + str(ref_num + 1)

    data = Q[:, station_nums]
    q_ref = Q_ref[ref_num, station_nums]

    # Create Simple function approximation
    # Save points used to partition D for simple function approximation and the
    # approximation itself (this can be used to make close comparisons...)
    (rho_D_M, d_distr_samples,
     d_Tree) = sfun.uniform_hyperrectangle(data,
                                           q_ref,
                                           bin_ratio=0.15,
                                           center_pts_per_edge=np.ones(
                                               (data.shape[1], )))
    mdict = dict()
    mdict['rho_D_M'] = rho_D_M
    mdict['d_distr_samples'] = d_distr_samples

    # Calculate P on the actual samples with the assumption that voronoi
    # cells have equal size
    (P1, lam_vol1, io_ptr1) = calcP.prob(samples, data, rho_D_M,
                                         d_distr_samples, d_Tree)
    print "Calculating prob"
    mdict['P1'] = P1
    mdict['lam_vol1'] = lam_vol1
    mdict['lem1'] = samples
    mdict['io_ptr1'] = io_ptr1

    # Export P and compare to MATLAB solution visually
    sio.savemat(filename, mdict, do_compression=True)
Example #11
dmax = data.max(axis=0)
dmin = data.min(axis=0)
dscale = bin_ratio*(dmax-dmin)
Qmax = Q_ref + 0.5*dscale
Qmin = Q_ref - 0.5*dscale
def rho_D(x):
  return np.all(np.logical_and(np.greater(x, Qmin), np.less(x, Qmax)), axis=1)

# Plot the data domain
plotD.show_data(data, Q_ref = Q_ref, rho_D=rho_D, showdim=2)

# Whether or not to use a deterministic description of the simple function
# approximation of the output probability
deterministic_discretize_D = True
if deterministic_discretize_D == True:
  (d_distr_prob, d_distr_samples, d_Tree) = simpleFunP.uniform_hyperrectangle(data=data,
                                                                              Q_ref=Q_ref, 
                                                                              bin_ratio=bin_ratio, 
                                                                              center_pts_per_edge = 1)
else:
  (d_distr_prob, d_distr_samples, d_Tree) = simpleFunP.unif_unif(data=data,
                                                                 Q_ref=Q_ref, 
                                                                 M=50, 
                                                                 bin_ratio=bin_ratio, 
                                                                 num_d_emulate=1E5)
  
# calculate probabilities making the Monte Carlo assumption
(P,  lam_vol, io_ptr) = calculateP.prob(samples=samples,
                                        data=data,
                                        rho_D_M=d_distr_prob,
                                        d_distr_samples=d_distr_samples)

# calculate 2D marginal probabilities
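The indicator rho_D defined near the top of this example returns True exactly for points strictly inside the box [Qmin, Qmax]. A small standalone check (values hypothetical):

    import numpy as np

    Qmin = np.array([0.0, 0.0])
    Qmax = np.array([1.0, 1.0])

    def rho_D(x):
        return np.all(np.logical_and(np.greater(x, Qmin), np.less(x, Qmax)), axis=1)

    pts = np.array([[0.5, 0.5],    # inside  -> True
                    [1.5, 0.5]])   # outside -> False
    print(rho_D(pts))              # [ True False]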
Example #12
defeats the purpose of "localizing" the probability within a subset of D.
    
uniform_hyperrectangle uses the same measure, defined in the same way as in
unif_unif, but the difference is in the discretization, which is on a
regular grid defined by center_pts_per_edge. If center_pts_per_edge = 1,
then the contour event corresponding to the entire support of rho_D is
approximated as a single event. This is done by carefully placing a
regular 3x3 grid (for the D=2 case) of points in D, with the center
point of the grid in the center of the support of the measure and the
other points placed outside of the rectangle defining the support, to
define a total of 9 contour events, 8 of them with zero probability.
'''
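# Concretely (restating the note above, not introducing new API): in D = 2
# with center_pts_per_edge = 1, uniform_hyperrectangle returns 9
# d_distr_samples on a 3x3 grid, and only the center point, the one lying
# inside the support of rho_D, carries nonzero probability, e.g.
#   (d_distr_prob, d_distr_samples, d_Tree) = simpleFunP.uniform_hyperrectangle(
#       data=data, Q_ref=Q_ref, bin_ratio=0.2, center_pts_per_edge=1)
#   # np.isclose(d_distr_prob.sum(), 1.0); (d_distr_prob > 0).sum() == 1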
deterministic_discretize_D = True

if deterministic_discretize_D == True:
  (d_distr_prob, d_distr_samples, d_Tree) = simpleFunP.uniform_hyperrectangle(data=data,
                                              Q_ref=Q_ref, bin_ratio=0.2, center_pts_per_edge = 1)
else:
  (d_distr_prob, d_distr_samples, d_Tree) = simpleFunP.unif_unif(data=data,
                                              Q_ref=Q_ref, M=50, bin_ratio=0.2, num_d_emulate=1E5)


# create emulated points
'''
Suggested changes for user:
    
If using a regular grid of sampling (if random_sample = False), we set
    
  lambda_emulate = samples
  
Otherwise, play around with num_l_emulate. A value of 1E2 will probably
give poor results while results become fairly consistent with values 
'''
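Following the suggestion in the comment above, with a regular (non-random) sampling grid the emulation step can be skipped and the input samples reused directly. A sketch using only names already present in this example (random_sample is the flag the comment refers to):

    if not random_sample:
        lambda_emulate = samples    # regular grid: reuse the input samples
    else:
        lambda_emulate = calcP.emulate_iid_lebesgue(lam_domain, num_l_emulate)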