# Example #1  (translated from Russian "Пример #1"; commented out so the
# stray pasted header no longer breaks parsing)
# 0
    return sio.loadmat(filename)


# The pymks takes a different format of data than our .mat data
def shuffle_ndarray(data):
    """Move the trailing axis to the front: (n, m, k) -> (k, n, m).

    Replaces the original per-slice copy loop (which also relied on the
    Python-2-only `xrange`) with one vectorized step.  `astype` returns a
    fresh float64 array, matching the dtype the original `np.zeros`
    buffer produced regardless of the input dtype.
    """
    # moveaxis returns a view; astype (copy=True by default) materializes
    # a new contiguous float64 array, exactly like the old implementation.
    return np.moveaxis(data, -1, 0).astype(np.float64)


def analyze_data_slice(al_data_slice):
    """Return the 2-point spatial correlations of one microstructure slice.

    The slice is discretized on a 3-state primitive basis over the
    domain [0, 2] before the correlations are computed.
    """
    basis = PrimitiveBasis(n_states=3, domain=[0, 2])
    return correlate(basis.discretize(al_data_slice))


if __name__ == '__main__':
    # Allow the .mat path to be given on the command line; fall back to
    # the original hard-coded location for backward compatibility.
    import sys
    default_path = '/Users/Astraeus/Dropbox/Project 8883/800_1_pp1.mat'
    mat_path = sys.argv[1] if len(sys.argv) > 1 else default_path

    # NOTE(review): in this file `load_data` is defined *below* this
    # block (bad merge/paste), so this would raise NameError when run
    # as-is — the definitions need to move above the entry point.
    al_data_blob = load_data(mat_path)
    # Reorder axes to the (time, x, y) layout pymks expects.
    al_data = shuffle_ndarray(al_data_blob['phase_field_solid'])

    # draw_microstructures(al_data[10:11, :,:])

    # Compute correlations for a single time step (slice 10, kept as a
    # 1-element block so the array stays 3-D).
    corrs = analyze_data_slice(al_data[10:11, :, :])
    draw_correlations(corrs[0].real)
# Just loads the data from the .mat file in dropbox
def load_data(filename):
    """Return the dict of variables stored in the .mat file *filename*."""
    mat_contents = sio.loadmat(filename)
    return mat_contents

# The pymks takes a different format of data than our .mat data
def shuffle_ndarray(data):
    """Move the trailing axis to the front: (n, m, k) -> (k, n, m).

    Replaces the original per-slice copy loop (which also relied on the
    Python-2-only `xrange`) with one vectorized step.  `astype` returns a
    fresh float64 array, matching the dtype the original `np.zeros`
    buffer produced regardless of the input dtype.
    """
    # moveaxis returns a view; astype (copy=True by default) materializes
    # a new contiguous float64 array, exactly like the old implementation.
    return np.moveaxis(data, -1, 0).astype(np.float64)


def analyze_data_slice(al_data_slice):
    """Return the 2-point spatial correlations of one microstructure slice.

    Discretizes the slice on a 3-state primitive basis over [0, 2],
    then computes and returns its correlations.
    """
    basis = PrimitiveBasis(n_states=3, domain=[0, 2])
    discretized = basis.discretize(al_data_slice)
    return correlate(discretized)

# NOTE(review): this is a second, duplicate copy of the script entry point
# (an identical block appears earlier in the file) — looks like a bad
# merge/paste; one copy should be removed.
if __name__ == '__main__':
  # Load the phase-field data from the hard-coded .mat path, then reorder
  # the axes into the (time, x, y) layout pymks expects.
  al_data_blob = load_data('/Users/Astraeus/Dropbox/Project 8883/800_1_pp1.mat')
  al_data = shuffle_ndarray(al_data_blob['phase_field_solid'])

  # draw_microstructures(al_data[10:11, :,:])
  
  # Compute correlations for a single time
  # (slice 10 is taken as a 1-element block so the array stays 3-D)
  corrs = analyze_data_slice(al_data[10:11, :, :])  
  draw_correlations(corrs[0].real)
    # NOTE(review): fragment from a later version of this script; the
    # enclosing "for metadatum in metadata:" loop header (and whatever
    # built `metadata`) is missing from this paste, so this block does
    # not parse as-is — the missing lines need to be restored.
    # Get a representative slice from the block (or ave or whatever we decide on)
    best_slice = get_best_slice(metadatum['data'])
    # Get 2-pt Stats for the best slice
    print "--->Getting 2pt stats"
    metadatum['stats'] = get_correlations_for_slice(best_slice)
  
  print metadata[0]['stats'].shape
  # Construct X and Y for PCA and linkage
  print "-->Creating X and Y"
  i = 0
  # NOTE(review): `x` is not defined anywhere in view, and `i` is never
  # incremented inside this loop, so every iteration writes row 0 —
  # confirm whether an `i += 1` was lost in the paste.
  for metadatum in metadata:
    x[i,0:6*metadatum['x']**2] = metadatum['stats'].flatten()
 
  
  # Debug visualization of the first block's correlations; the quit()
  # below makes everything after it unreachable — presumably temporary
  # debugging left in place.
  prim_basis = PrimitiveBasis(n_states=3, domain=[0,2])
  x_ = prim_basis.discretize(metadata[0]['data'])
  x_corr = correlate(x_)
  draw_correlations(x_corr.real)
  quit()

  # Reduce all 2-pt Stats via PCA
  # Try linear reg on inputs and outputs
  reducer = PCA(n_components=3)
  linker = LinearRegression() 
  model = MKSHomogenizationModel(dimension_reducer=reducer,
                                 property_linker=linker,
                                 compute_correlations=False)
  model.n_components = 40
  # NOTE(review): `y` is not defined in this fragment, and fitting on
  # metadatum['stats'] (the last loop value) rather than the assembled
  # `x` matrix looks unintended — verify against the full script.
  model.fit(metadatum['stats'], y, periodic_axes=[0, 1]) 
  print model.reduced_fit_data