def get_correlations_for_slice(al_data_slice):
  # primitive basis: use 0, 1, 2 as the 3 discrete states
  prim_basis = PrimitiveBasis(n_states=3, domain=[0, 2])
  disc_basis = prim_basis.discretize(al_data_slice)
  # get the 2-point correlations, treating both axes as periodic
  correlations = correlate(disc_basis, periodic_axes=(0, 1))
  return correlations
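# A minimal usage sketch for the function above, assuming pymks and numpy are
# available; the slice shape and phase labels here are illustrative only:
import numpy as np
from pymks import PrimitiveBasis
from pymks.stats import correlate

example_slice = np.random.randint(0, 3, size=(1, 21, 21))  # hypothetical input
stats = get_correlations_for_slice(example_slice)
print(stats.shape)  # (1, 21, 21, 6): 3 auto- plus 3 cross-correlations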
def compute_correlations(x, correlations=None, compute_flat=True):
  print "-->Constructing Correlations"
  prim_basis = PrimitiveBasis(n_states=3, domain=[0,2])
  x_ = prim_basis.discretize(x)
  if correlations is None:
    x_corr = correlate(x_, periodic_axes=[0, 1])
  else:
    x_corr = correlate(x_, periodic_axes=[0, 1], correlations=correlations)
  if compute_flat:
    x_corr_flat = np.ndarray(shape=(x.shape[0], x_corr.shape[1] * x_corr.shape[2] * x_corr.shape[3]))
    for row_ctr, row in enumerate(x_corr):
      x_corr_flat[row_ctr] = row.flatten()
    return x_corr, x_corr_flat
  return x_corr
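# The per-row flattening loop above is equivalent to a single reshape; a
# small self-contained check (the shapes here are illustrative):
import numpy as np

demo = np.arange(2 * 3 * 3 * 6).reshape(2, 3, 3, 6)  # (samples, x, y, n_corr)
flat = demo.reshape(demo.shape[0], -1)
print(flat.shape)  # (2, 54)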
      print "--->RandomForest"
      linker = RandomForestClassifier()
      params = {'n_estimators':range(1,100,10)}
      opt_model = run_gridcv_linkage(y,x_pca,linker,params)
      print('---->n_est:', opt_model.best_estimator_.n_estimators)
      r2_mean, r2_std, mse_mean, mse_std = run_conventional_linkage(y,x_pca,5,opt_model)
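# run_gridcv_linkage and run_conventional_linkage are not defined in this
# fragment. A plausible sketch of the grid-search helper, assuming
# scikit-learn's GridSearchCV (the argument order and cv setting are
# assumptions, not the original implementation):
from sklearn.model_selection import GridSearchCV

def run_gridcv_linkage(y, x_pca, linker, params, cv=5):
  # exhaustive search over the linker's hyperparameter grid
  grid = GridSearchCV(linker, params, cv=cv)
  grid.fit(x_pca, y)
  return grid  # exposes best_estimator_, as used above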

  quit()

  print "-->Constructing Correlations"
  prim_basis = PrimitiveBasis(n_states=3, domain=[0,2])
  x_ = prim_basis.discretize(x)
  x_corr = correlate(x_, periodic_axes=[0, 1])
  x_corr_flat = np.ndarray(shape=(x.shape[0], x_corr.shape[1] * x_corr.shape[2] * x_corr.shape[3]))
  for row_ctr, row in enumerate(x_corr):
    x_corr_flat[row_ctr] = row.flatten()

  print x.shape
  flat_len = (x.shape[0],) + (np.prod(x.shape[1:]),)
  X_train, X_test, y_train, y_test = train_test_split(x.reshape(flat_len), y,
                                                    test_size=0.2, random_state=3)
  print(x_corr.shape)
  print(X_test.shape)
  # uncomment to view one container's correlations
  #draw_correlations(x_corr[0].real)
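  # The train/test split above is not used further in this fragment; a hedged
  # sketch of a downstream fit, assuming scikit-learn's LinearRegression
  # (the model choice here is an assumption):
  from sklearn.linear_model import LinearRegression
  from sklearn.metrics import r2_score
  reg = LinearRegression()
  reg.fit(X_train, y_train)
  print('held-out R^2:', r2_score(y_test, reg.predict(X_test)))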
def _property2distribution(self, y):
    # one primitive-basis state per bin turns the scalar property values
    # into a discrete distribution over the domain
    n_bins = int((self.domain[1] - self.domain[0] + self.dx) / self.dx)
    p_basis = PrimitiveBasis(n_states=n_bins, domain=self.domain)
    return p_basis.discretize(y)[:, 0, :]
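# A small worked check of the binning arithmetic above, with hypothetical
# values domain=[0, 1] and dx=0.1:
domain, dx = [0.0, 1.0], 0.1
n_bins = int((domain[1] - domain[0] + dx) / dx)  # (1.0 + 0.1) / 0.1 -> 11
print(n_bins)  # 11 bins, one primitive-basis state per bin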
Example #6
def analyze_data_slice(al_data_slice):
    # primitive basis: 3 discrete states on the domain [0, 2]
    prim_basis = PrimitiveBasis(n_states=3, domain=[0, 2])
    disc_basis = prim_basis.discretize(al_data_slice)
    # 2-point statistics (non-periodic, since periodic_axes is not given)
    correlations = correlate(disc_basis)
    return correlations
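# A minimal usage sketch with an illustrative random 3-phase input; unlike
# get_correlations_for_slice above, this variant omits periodic_axes, so
# correlate computes non-periodic statistics:
import numpy as np

demo_slice = np.random.randint(0, 3, size=(1, 15, 15))  # hypothetical input
demo_corr = analyze_data_slice(demo_slice)
print(demo_corr.shape)  # six correlation maps for the three states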
from pymks import PrimitiveBasis
from pymks.datasets import make_checkerboard_microstructure
from pymks.stats import autocorrelate
from pymks.stats import crosscorrelate
from pymks.tools import draw_microstructures
from pymks.tools import draw_autocorrelations
from pymks.tools import draw_crosscorrelations

# Make checkerboard microstructure and observe it.
X = make_checkerboard_microstructure(square_size=10, n_squares=6)
draw_microstructures(X)

# Define the basis for 2-point statistics
prim_basis = PrimitiveBasis(n_states=2)
X_ = prim_basis.discretize(X)

# Compute the auto-correlations of the microstructure function and draw them
X_auto = autocorrelate(X,
                       basis=prim_basis,
                       periodic_axes=(0, 1))
correlations = [('white', 'white'), ('black', 'black')]
draw_autocorrelations(X_auto[0], autocorrelations=correlations)

# Check the volume fraction of each phase, i.e. the centre (r = 0) value
# of its auto-correlation
centre = (X_auto.shape[1] + 1) // 2
print('Volume fraction of white phase:', X_auto[0, centre, centre, 0])
print('Volume fraction of black phase:', X_auto[0, centre, centre, 1])
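# The centre value of an auto-correlation equals that phase's volume
# fraction, so the two numbers above can be cross-checked directly against
# the microstructure array (a sketch; state 0 is white, state 1 is black):
import numpy as np

print('white fraction from X:', np.mean(X == 0))
print('black fraction from X:', np.mean(X == 1))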

# Compute the cross-correlation of the microstructure function and draw it
X_cross = crosscorrelate(X,
                         basis=prim_basis,
                         periodic_axes=(0, 1))
draw_crosscorrelations(X_cross[0], crosscorrelations=[('black', 'white')])

    # Get a representative slice from the block (or an average, or whatever we decide on)
    best_slice = get_best_slice(metadatum['data'])
    # Get 2-pt Stats for the best slice
    print "--->Getting 2pt stats"
    metadatum['stats'] = get_correlations_for_slice(best_slice)
  
  print metadata[0]['stats'].shape
  # Construct X and Y for PCA and linkage
  print "-->Creating X and Y"
  for i, metadatum in enumerate(metadata):
    x[i, 0:6 * metadatum['x'] ** 2] = metadatum['stats'].flatten()
 
  
  prim_basis = PrimitiveBasis(n_states=3, domain=[0, 2])
  x_ = prim_basis.discretize(metadata[0]['data'])
  x_corr = correlate(x_)
  draw_correlations(x_corr[0].real)  # draw the first sample's correlation maps
  quit()

  # Reduce all 2-pt stats via PCA
  # Try linear regression on the reduced inputs and the outputs
  reducer = PCA(n_components=3)
  linker = LinearRegression()
  model = MKSHomogenizationModel(dimension_reducer=reducer,
                                 property_linker=linker,
                                 compute_correlations=False)
  model.n_components = 40  # overrides the reducer's n_components set above
  model.fit(metadatum['stats'], y, periodic_axes=[0, 1])
  print model.reduced_fit_data
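  # With compute_correlations=False the model is fitted on precomputed
  # statistics, so predict takes the same kind of input. A hedged sketch,
  # reusing the training statistics purely for illustration:
  y_pred = model.predict(metadatum['stats'], periodic_axes=[0, 1])
  print(y_pred)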