def get_correlations_for_slice(al_data_slice):
  # the primitive basis tells it to use 0, 1, 2 as the 3 local states
  prim_basis = PrimitiveBasis(n_states=3, domain=[0,2])
  disc_basis = prim_basis.discretize(al_data_slice)
  # get the correlations
  correlations = correlate(disc_basis, periodic_axes=(0, 1))
  return correlations
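A minimal usage sketch (not part of the original example), assuming `al_data_slice` is a (1, N, N) integer array of phase labels in {0, 1, 2} and that PrimitiveBasis and correlate are imported from pymks as in the other snippets:

import numpy as np
from pymks import PrimitiveBasis
from pymks.stats import correlate

al_data_slice = np.random.randint(3, size=(1, 21, 21))  # hypothetical input slice
stats = get_correlations_for_slice(al_data_slice)
print(stats.shape)  # (1, 21, 21, n_correlations) -- one 2-pt statistics map per correlation pair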
Example no. 2
def test_normalization_rfftn():
    """Test normalization with rfftn
    """
    from pymks import PrimitiveBasis
    from pymks.stats import _normalize
    prim_basis = PrimitiveBasis()
    Nx = Ny = 5
    X_ = np.zeros((1, Nx, Ny, 1))
    prim_basis._axes = np.arange(X_.ndim - 2) + 1
    prim_basis._axes_shape = (2 * Nx, 2 * Ny)
    norm = _normalize(X_.shape, prim_basis, None)
    assert norm.shape == (1, Nx, Ny, 1)
    assert np.allclose(norm[0, Nx // 2, Ny // 2, 0], 25)
def compute_correlations(x, correlations=None, compute_flat=True):
  print "-->Constructing Correlations"
  prim_basis = PrimitiveBasis(n_states=3, domain=[0,2])
  x_ = prim_basis.discretize(x)
  if correlations is None:
    x_corr = correlate(x_, periodic_axes=[0, 1])
  else:
    x_corr = correlate(x_, periodic_axes=[0, 1], correlations=correlations)
  if compute_flat:
    x_corr_flat = np.ndarray(shape=(x.shape[0],  x_corr.shape[1]*x_corr.shape[2]*x_corr.shape[3]))
    row_ctr = 0
    for row in x_corr:
      x_corr_flat[row_ctr] = row.flatten()
      row_ctr += 1
    return x_corr, x_corr_flat
  return x_corr
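A minimal usage sketch (not in the original source), assuming `x` is a stack of 3-state microstructure slices; the shape comments assume correlate returns one 2-pt statistics map per requested correlation:

import numpy as np

x = np.random.randint(3, size=(10, 21, 21))  # hypothetical microstructure stack
x_corr, x_corr_flat = compute_correlations(x, correlations=[(0, 0), (0, 1), (0, 2)])
print(x_corr.shape)       # expected (10, 21, 21, 3)
print(x_corr_flat.shape)  # expected (10, 21 * 21 * 3), ready for PCA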
def plot_component_variance(x, y):
  prim_basis = PrimitiveBasis(n_states=3, domain=[0, 2])
  model = MKSHomogenizationModel(basis=prim_basis)
  model.n_components = 20
  model.fit(x, y, periodic_axes=[0, 1])
  # Draw the plot containing the PCA variance accumulation
  draw_component_variance(model.dimension_reducer.explained_variance_ratio_)
Example no. 5
def test_default_correlations():
    from pymks import PrimitiveBasis
    from pymks import MKSStructureAnalysis
    prim_basis = PrimitiveBasis(6)
    model_prim = MKSStructureAnalysis(basis=prim_basis)
    assert model_prim.correlations == [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4),
                                       (0, 5)]
Example no. 6
def test_reshape_X():
    from pymks import MKSStructureAnalysis
    from pymks import PrimitiveBasis
    analyzer = MKSStructureAnalysis(basis=PrimitiveBasis())
    X = np.arange(18, dtype='float64').reshape(2, 3, 3)
    X_test = np.concatenate((np.arange(-4, 5)[None], np.arange(-4, 5)[None]))
    assert np.allclose(analyzer._reduce_shape(X), X_test)
Example no. 7
def test_default_correlations():
    from pymks import PrimitiveBasis
    from pymks import MKSHomogenizationModel
    prim_basis = PrimitiveBasis(6)
    model_prim = MKSHomogenizationModel(basis=prim_basis)
    assert model_prim.correlations == [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4),
                                       (0, 5)]
Example no. 8
def test_set_correlations():
    from pymks import PrimitiveBasis
    from pymks import MKSHomogenizationModel
    test_correlations = [(0, 0), (0, 2), (0, 4)]
    prim_basis = PrimitiveBasis(6)
    model_prim = MKSHomogenizationModel(basis=prim_basis,
                                        correlations=test_correlations)
    assert model_prim.correlations == test_correlations
Example no. 9
def test_set_correlations():
    from pymks import PrimitiveBasis
    from pymks import MKSStructureAnalysis
    test_correlations = [(0, 0), (0, 2), (0, 4)]
    prim_basis = PrimitiveBasis(6)
    model_prim = MKSStructureAnalysis(basis=prim_basis,
                                      correlations=test_correlations)
    assert model_prim.correlations == test_correlations
Example no. 10
def test_stats_in_parallel():
    from pymks.bases import PrimitiveBasis
    from pymks.stats import correlate
    from pymks.datasets import make_microstructure
    p_basis = PrimitiveBasis(5)
    X = make_microstructure(n_samples=5, n_phases=3)
    X_corr_actual = correlate(X, p_basis)
    for i in range(1, 4):
        X_corr_test = correlate(X, p_basis, n_jobs=i)
        assert np.allclose(X_corr_actual, X_corr_test)
Example no. 11
def test_set_components():
    from pymks import MKSStructureAnalysis
    from pymks import PrimitiveBasis
    p_basis = PrimitiveBasis(2)
    model = MKSStructureAnalysis(basis=p_basis)
    X = np.random.randint(2, size=(50, 10, 10))
    model.fit(X)
    components = model.components_
    model.components_ = components * 2
    assert np.allclose(model.components_, components * 2)
Example no. 12
def test_crosscorrelate_with_specific_correlations():
    from pymks.stats import crosscorrelate
    from pymks import PrimitiveBasis
    X = np.array([[[0, 0, 0, 0], [0, 1, 0, 0], [0, 0, 2, 0], [0, 0, 0, 0],
                   [0, 0, 0, 0]]])
    crosscorrelations = [(1, 2)]
    p_basis = PrimitiveBasis(n_states=3)
    X_cross = crosscorrelate(X, p_basis, crosscorrelations=crosscorrelations)
    X_result = np.array([[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.],
                         [0., 0., 0., 1 / 12.], [0., 0., 0., 0.]])
    assert np.allclose(X_cross[0, ..., 0], X_result)
Example no. 13
def test_coef_setter():
    from pymks import MKSHomogenizationModel
    from pymks import PrimitiveBasis
    p_basis = PrimitiveBasis(2)
    model = MKSHomogenizationModel(basis=p_basis)
    X = np.random.randint(2, size=(50, 10, 10))
    y = np.random.randint(2, size=(50, ))
    model.fit(X, y)
    coefs = model.coef_
    model.coef_ = coefs * 2
    assert np.allclose(model.coef_, coefs * 2)
Example no. 14
def test_intercept_setter():
    from pymks import MKSHomogenizationModel
    from pymks import PrimitiveBasis
    p_basis = PrimitiveBasis(2)
    model = MKSHomogenizationModel(basis=p_basis)
    X = np.random.randint(2, size=(50, 10, 10))
    y = np.random.randint(2, size=(50, ))
    model.fit(X, y)
    intercept = model.intercept_
    model.intercept_ = intercept * 2
    assert np.allclose(model.intercept_, intercept * 2)
Example no. 15
def test_setting_kernel():
    from pymks.datasets import make_elastic_FE_strain_delta
    from pymks import MKSLocalizationModel
    from pymks import PrimitiveBasis
    elastic_modulus = (100, 130)
    poissons_ratio = (0.3, 0.3)
    X_delta, y = make_elastic_FE_strain_delta(size=(21, 21),
                                              elastic_modulus=elastic_modulus,
                                              poissons_ratio=poissons_ratio)
    p_basis = PrimitiveBasis(2)
    model = MKSLocalizationModel(basis=p_basis)
    model.fit(X_delta, y)
    coefs = model.coef_
    model.resize_coeff((30, 30))
    model.coef_ = coefs
    assert np.allclose(model.predict(X_delta), y, atol=1e-4)
Example no. 16
def test_stats_in_parallel():
    import time
    from pymks.bases import PrimitiveBasis
    from pymks.stats import correlate
    from pymks.datasets import make_microstructure
    p_basis = PrimitiveBasis(5)
    if p_basis._pyfftw:
        X = make_microstructure(n_samples=5, n_phases=3)
        t = []
        for i in range(1, 4):
            t_start = time.time()
            correlate(X, p_basis, n_jobs=i)
            t.append(time.time() - t_start)
            assert t == sorted(t, reverse=True)
    else:
        pass
Example no. 17
def test_autocorrelate_with_specific_correlations():
    from pymks.stats import autocorrelate
    from pymks import PrimitiveBasis
    X = np.array([[[1, 0, 1, 1], [1, 0, 1, 1], [0, 0, 2, 0], [0, 0, 0, 0],
                   [0, 0, 0, 0]]])
    autocorrelations = [(0, 0), (2, 2)]
    p_basis = PrimitiveBasis(n_states=3)
    X_auto = autocorrelate(X, p_basis, autocorrelations=autocorrelations)
    X_result_0 = np.array([[2 / 3., 1 / 3., 5 / 12., 4 / 9.],
                           [5 / 8., 5 / 12., 9 / 16., 1 / 2.],
                           [1 / 2., 7 / 15., 13 / 20., 7 / 15.],
                           [3 / 8., 1 / 2., 9 / 16., 5 / 12.],
                           [1 / 6., 4 / 9., 5 / 12., 1 / 3.]])
    assert np.allclose(X_auto[0, ..., 0], X_result_0)
    X_result_1 = np.array([[0., 0., 0., 0.], [0., 0., 0., 0.],
                           [0., 0., 0.05, 0.], [0., 0., 0., 0.],
                           [0., 0., 0., 0.]])
    assert np.allclose(X_auto[0, ..., 1], X_result_1)
Example no. 18
def test_store_correlations():
    from pymks import MKSStructureAnalysis
    from pymks import PrimitiveBasis
    from pymks.stats import correlate
    p_basis = PrimitiveBasis(2)
    model = MKSStructureAnalysis(basis=p_basis, store_correlations=True)
    X = np.random.randint(2, size=(2, 4, 4))
    model.fit(X)
    X = correlate(X, p_basis, correlations=[(0, 0), (0, 1)])
    assert np.allclose(X, model.fit_correlations)
    X_0 = np.random.randint(2, size=(2, 4, 4))
    model.transform(X_0)
    X_corr_0 = correlate(X_0, p_basis, correlations=[(0, 0), (0, 1)])
    assert np.allclose(X_corr_0, model.transform_correlations)
    X_1 = np.random.randint(2, size=(2, 4, 4))
    model.transform(X_1)
    X_corr_1 = correlate(X_1, p_basis, correlations=[(0, 0), (0, 1)])
    X_corr_ = np.concatenate((X_corr_0, X_corr_1))
    assert np.allclose(X_corr_, model.transform_correlations)
def plot_components(x, y, n_comps, linker_model, verbose=2):
  prim_basis = PrimitiveBasis(n_states=3, domain=[0, 2])
  model = MKSHomogenizationModel(basis=prim_basis,
                                 property_linker=linker_model)
  model.n_components = n_comps
  model.fit(x, y, periodic_axes=[0, 1])

  print(model.property_linker.coef_)
  draw_components([model.reduced_fit_data[0:3, :2],
                   model.reduced_fit_data[3:6, :2],
                   model.reduced_fit_data[6:9, :2],
                   model.reduced_fit_data[9:11, :2],
                   model.reduced_fit_data[11:14, :2],
                   model.reduced_fit_data[14:16, :2],
                   model.reduced_fit_data[16:17, :2],
                   model.reduced_fit_data[17:18, :2]],
                   ['Ag:0.237	Cu:0.141	v:0.0525',
                    'Ag:0.237	Cu:0.141	v:0.0593',
                    'Ag:0.237	Cu:0.141	v:0.0773',
                    'Ag:0.237	Cu:0.141	v:0.0844',
                    'Ag:0.239	Cu:0.138	v:0.0791',
                    'Ag:0.239	Cu:0.138	v:0.0525',
                    'Ag:0.237	Cu:0.141	v:0.0914',
                    'Ag:0.237	Cu:0.141	v:0.0512'])
    metadatum['data'] = load_data('data/'+metadatum['filename'])
    # Get a representative slice from the block (or ave or whatever we decide on)
    best_slice = get_best_slice(metadatum['data'])
    # Get 2-pt Stats for the best slice
    print "--->Getting 2pt stats"
    metadatum['stats'] = get_correlations_for_slice(best_slice)
  
  print(metadata[0]['stats'].shape)
  # Construct X and Y for PCA and linkage
  print("-->Creating X and Y")
  for i, metadatum in enumerate(metadata):
    x[i, 0:6 * metadatum['x'] ** 2] = metadatum['stats'].flatten()
 
  
  prim_basis = PrimitiveBasis(n_states=3, domain=[0,2])
  x_ = prim_basis.discretize(metadata[0]['data'])
  x_corr = correlate(x_)
  draw_correlations(x_corr.real)
  quit()

  # Reduce all 2-pt Stats via PCA
  # Try linear reg on inputs and outputs
  reducer = PCA(n_components=3)
  linker = LinearRegression() 
  model = MKSHomogenizationModel(dimension_reducer=reducer,
                                 property_linker=linker,
                                 compute_correlations=False)
  model.n_components = 40
  model.fit(metadatum['stats'], y, periodic_axes=[0, 1]) 
  print(model.reduced_fit_data)
import matplotlib.pyplot as plt

from pymks import PrimitiveBasis
from pymks.datasets import make_checkerboard_microstructure
from pymks.stats import autocorrelate
from pymks.stats import crosscorrelate
from pymks.tools import draw_microstructures
from pymks.tools import draw_autocorrelations
from pymks.tools import draw_crosscorrelations

# Make checkerboard microstructure and observe it.
X = make_checkerboard_microstructure(square_size=10, n_squares=6)
draw_microstructures(X)

# Define the basis for 2-point statistics
prim_basis = PrimitiveBasis(n_states=2)
X_ = prim_basis.discretize(X)

# Compute auto-correlations of the microstructure function and draw them
X_auto = autocorrelate(X,
                       basis=prim_basis,
                       periodic_axes=(0, 1))
correlations = [('white', 'white'), ('black', 'black')]
draw_autocorrelations(X_auto[0], autocorrelations=correlations)

# Check the volume fraction of both phases, i.e. the centre (zero-displacement) value of the auto-correlations
centre = (X_auto.shape[1] + 1) // 2
print('Volume fraction of white phase', X_auto[0, centre, centre, 0])
print('Volume fraction of black phase', X_auto[0, centre, centre, 1])

# Computing the cross correlation of the microstructure function and drawing the same
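The example is cut off here; a minimal sketch of the announced cross-correlation step, assuming the same pymks API used for the autocorrelate call above and the 'black'/'white' labelling of the two states:

X_cross = crosscorrelate(X,
                         basis=prim_basis,
                         periodic_axes=(0, 1))
correlations_cross = [('black', 'white')]
draw_crosscorrelations(X_cross[0], crosscorrelations=correlations_cross)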
Example no. 22
def analyze_data_slice(al_data_slice):
    # primitive basis: use 0, 1, 2 as the 3 local states
    prim_basis = PrimitiveBasis(n_states=3, domain=[0, 2])
    disc_basis = prim_basis.discretize(al_data_slice)
    correlations = correlate(disc_basis)
    return correlations
# -*- coding: utf-8 -*-

import warnings
warnings.filterwarnings('ignore')
from pymks import PrimitiveBasis
from pymks.stats import autocorrelate
from pymks.tools import draw_autocorrelations

# Create list of autocorrelations corresponding to the binary image lists
two_point_correlations_precipitate = []

# Two states (black and white); using primitive basis for microstructure function
p_basis = PrimitiveBasis(n_states=2, domain=[0, 1])

for i in range(0, 144):
    two_point_correlations_precipitate.append(
        autocorrelate(binary_image_list_precipitate[i].reshape((1, 512, 512)),
                      p_basis,
                      periodic_axes=(0, 1),
                      autocorrelations=[(0, 0)]))
    # assuming both axes are periodic and only calculating the black autocorrelations

two_point_correlations_bicontinuous = []
for i in range(0, 48):
    two_point_correlations_bicontinuous.append(
        autocorrelate(binary_image_list_bicontinuous[i].reshape((1, 512, 512)),
                      p_basis,
                      periodic_axes=(0, 1),
                      autocorrelations=[(0, 0)]))

two_point_correlations_unknown = []
Example no. 24
def test_default_dimension_reducer():
    from sklearn.decomposition import PCA
    from pymks import MKSStructureAnalysis
    from pymks import PrimitiveBasis
    model = MKSStructureAnalysis(basis=PrimitiveBasis())
    assert isinstance(model.dimension_reducer, PCA)
Example no. 25
def test_default_property_linker():
    from sklearn.linear_model import LinearRegression
    from pymks import MKSHomogenizationModel, PrimitiveBasis
    prim_basis = PrimitiveBasis(n_states=2)
    model = MKSHomogenizationModel(basis=prim_basis)
    assert isinstance(model.property_linker, LinearRegression)
      print "--->RandomForest"
      linker = RandomForestClassifier()
      params = {'n_estimators':range(1,100,10)}
      opt_model = run_gridcv_linkage(y,x_pca,linker,params)
      print('---->n_est:'), (opt_model.best_estimator_.n_estimators)
      r2_mean, r2_std, mse_mean, mse_std = run_conventional_linkage(y,x_pca,5,opt_model)



  quit()



  print "-->Constructing Correlations"
  prim_basis = PrimitiveBasis(n_states=3, domain=[0,2])
  x_ = prim_basis.discretize(x)
  x_corr = correlate(x_, periodic_axes=[0, 1])
  x_corr_flat = np.ndarray(shape=(samples,  x_corr.shape[1]*x_corr.shape[2]*x_corr.shape[3]))
  for row_ctr, row in enumerate(x_corr):
    x_corr_flat[row_ctr] = row.flatten()

  print(x.shape)
  flat_len = (x.shape[0],) + (np.prod(x.shape[1:]),)
  X_train, X_test, y_train, y_test = train_test_split(x.reshape(flat_len), y,
                                                    test_size=0.2, random_state=3)
  print(x_corr.shape)
  print(X_test.shape)
  # uncomment to view one container's correlations
  # draw_correlations(x_corr[0].real)
Example no. 27
def predict(bq, log, table_url, predictor_url, reducer_url, ms_path, **kw):
    '''
    Predicts effective strength of 3-D RVE of a 2-phase composite with strength contrast s2/s1 = 5
    Args:
    - table_url - URL of the dream3d file containing microstructure data (phase labels)
    - predictor_url - URL of the sav file containing the calibrated model (LinearRegression)
    - reducer_url - URL of the sav file containing the dimensionality reducer (principal component basis)
    - ms_path - path to the microstructure data (phase labels) inside the dream3d file
    Returns:
    - y - predicted effective strength
    '''

    log.debug('kw is: %s', str(kw))
    predictor_uniq = predictor_url.split('/')[-1]
    reducer_uniq = reducer_url.split('/')[-1]
    table_uniq = table_url.split('/')[-1]

    predictor_url = bq.service_url('blob_service', path=predictor_uniq)
    predictor_path = os.path.join(kw.get('stagingPath', ''), 'predictor.sav')
    predictor_path = bq.fetchblob(predictor_url, path=predictor_path)

    reducer_url = bq.service_url('blob_service', path=reducer_uniq)
    reducer_path = os.path.join(kw.get('stagingPath', ''), 'reducer.sav')
    reducer_path = bq.fetchblob(reducer_url, path=reducer_path)

    # ms_path default: '/DataContainers/SyntheticVolumeDataContainer/CellData/Phases'

    # Default settings for 2-pt stats
    p_axes = (0, 1, 2)
    corrs = [(1, 1)]

    # Read hdf5 table
    table_service = bq.service('table')

    # Get dataset
    data = table_service.load_array(table_uniq, ms_path.lstrip('/'))
    ms = np.squeeze(data)

    # f = h5py.File(table_path, 'r')
    # data = f[ms_path].value
    # ms = np.squeeze(data)

    # Get phase labels as local states
    states = np.unique(ms)
    if len(states) > 2:
        log.warn(
            'WARNING: Model is only for two-phase materials! All extra phases will be considered as the second (hard) phase'
        )
        # collapse any extra phases onto the second (hard) phase, as the warning states
        ms[ms > states[1]] = states[1]
        states = states[:2]

    ph_1 = np.min(states)
    ph_2 = np.max(states)

    s1 = 0.2
    s2 = 1.0
    eta = s2 / s1
    f1 = np.count_nonzero(ms == ph_1) * 1.0 / np.prod(ms.shape)
    f2 = np.count_nonzero(ms == ph_2) * 1.0 / np.prod(ms.shape)
    sbar_up = (f1 * s1) + (f2 * s2)

    sbar_low = (f1 / s1) + (f2 / s2)
    sbar_low = 1.0 / sbar_low

    # Get the size of the RVE
    if len(ms.shape) == 4:
        dims = ms.shape[1:4]
    elif len(ms.shape) == 3:
        dims = ms.shape
        ms = np.expand_dims(ms, 0)
    else:
        log.error('ERROR: 3-D RVE(s) are expected!')
        return None

    # Load model and dimensionality reducer
    predictor = joblib.load(predictor_path)
    reducer = joblib.load(reducer_path)

    # Get the number of PC components used
    n_comps = predictor.named_steps['poly'].n_input_features_

    # Get the size of the calibration RVE
    nx_cal = int(np.round((reducer.components_.shape[1])**(1.0 / 3.0)))
    dims_cal = np.array((nx_cal, nx_cal, nx_cal))

    # Compute 2-pt stats
    n_states = len(states)
    p_basis = PrimitiveBasis(n_states=n_states, domain=states)
    tps = correlate(ms, p_basis, periodic_axes=p_axes, correlations=corrs)

    # Check size of the provided MVE: truncate if large, pad if small
    if np.prod(dims) > reducer.components_.shape[1]:
        tps = truncate(tps,
                       [len(ms), dims_cal[0], dims_cal[1], dims_cal[2], 1])
        dims = dims_cal
        log.info(
            'Microstructure volume is larger than calibration RVE. 2-pt correlation function is truncated'
        )
    elif np.prod(dims) < reducer.components_.shape[1]:
        tps = pad(tps, [len(ms), dims_cal[0], dims_cal[1], dims_cal[2], 1])
        dims = dims_cal
        log.info(
            'Microstructure volume is smaller than calibration RVE. 2-pt correlation function is padded'
        )

    # Convert 2-pt stats to a vector
    tps_v = np.reshape(tps, (len(ms), np.prod(dims)))

    # Get low-dimensional representation
    x = reducer.transform(tps_v)

    # Get the property prediction
    y = predictor.predict(x[:, 0:n_comps])

    # outtable_xml = table_service.store_array(y, name='predicted_strength')
    # return [ outtable_xml ]
    out_strength_xml = """<tag name="Strength">
                                <tag name="Strength" type="string" value="%s"/>
                                <tag name="sbar_up" type="string" value="%s"/>
                                <tag name="sbar_low" type="string" value="%s"/>
                                <tag name="Volume Fraction" type="string" value="%s"/>
                                <tag name="link" type="resource" value="%s"/>
                          </tag>""" % (str(y[0] * eta), str(
        sbar_up * eta), str(
            sbar_low * eta), str(f1) + ', ' + str(f2), table_url)
    return [out_strength_xml]
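The truncate and pad helpers used above are not defined in this snippet; a minimal, hypothetical sketch of centre-aligned versions (not necessarily the module's actual implementation):

import numpy as np

def truncate(a, shape):
    # Crop `a` symmetrically about its centre down to `shape` (hypothetical sketch).
    slices = tuple(slice((old - new) // 2, (old - new) // 2 + new)
                   for old, new in zip(a.shape, shape))
    return a[slices]

def pad(a, shape):
    # Zero-pad `a` symmetrically about its centre up to `shape` (hypothetical sketch).
    widths = [((new - old) // 2, new - old - (new - old) // 2)
              for old, new in zip(a.shape, shape)]
    return np.pad(a, widths, mode='constant')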