def test_stress():
    """Fit an MKSHomogenizationModel on random elastic stress data and
    check that it predicts held-out stresses to two decimal places."""
    from pymks.datasets import make_elastic_stress_random
    from pymks import MKSHomogenizationModel, DiscreteIndicatorBasis

    grain_size = [(5, 5), (6, 4), (4, 6), (2, 2)]
    elastic_modulus = (410, 200)
    poissons_ratio = (0.28, 0.3)
    macro_strain = 0.001
    size = (21, 21)

    # 200 training samples per grain-size class, fixed seed for determinism.
    X, y = make_elastic_stress_random(
        n_samples=[200] * len(grain_size), size=size, grain_size=grain_size,
        elastic_modulus=elastic_modulus, poissons_ratio=poissons_ratio,
        macro_strain=macro_strain, seed=0)
    dbasis = DiscreteIndicatorBasis(n_states=2, domain=[0, 1])
    model = MKSHomogenizationModel(basis=dbasis, n_components=3, degree=3)
    model.fit(X, y)

    # One fresh sample per grain-size class, drawn with a different seed.
    X_new, y_new = make_elastic_stress_random(
        n_samples=[1] * len(grain_size), size=size, grain_size=grain_size,
        elastic_modulus=elastic_modulus, poissons_ratio=poissons_ratio,
        macro_strain=macro_strain, seed=3)
    y_result = model.predict(X_new)
    assert np.allclose(np.round(y_new, decimals=2),
                       np.round(y_result, decimals=2))
def plot_component_variance(x, y):
    """Fit a 20-component homogenization model to ``(x, y)`` and draw the
    per-component PCA explained-variance plot."""
    basis = PrimitiveBasis(n_states=3, domain=[0, 2])
    model = MKSHomogenizationModel(basis=basis)
    model.n_components = 20
    model.fit(x, y, periodic_axes=[0, 1])
    # Plot the variance captured by each principal component.
    draw_component_variance(model.dimension_reducer.explained_variance_ratio_)
def test_stress():
    """Train a 3-component, degree-3 homogenization model on random
    elastic microstructures and verify its predictions on new samples."""
    from pymks.datasets import make_elastic_stress_random
    from pymks import MKSHomogenizationModel, DiscreteIndicatorBasis

    grain_size = [(5, 5), (6, 4), (4, 6), (2, 2)]
    common = dict(size=(21, 21),
                  grain_size=grain_size,
                  elastic_modulus=(410, 200),
                  poissons_ratio=(0.28, 0.3),
                  macro_strain=0.001)

    # Training set: 200 samples per grain-size class.
    X, y = make_elastic_stress_random(
        n_samples=[200] * len(grain_size), seed=0, **common)
    dbasis = DiscreteIndicatorBasis(n_states=2, domain=[0, 1])
    model = MKSHomogenizationModel(basis=dbasis, n_components=3, degree=3)
    model.fit(X, y)

    # Test set: a single sample per grain-size class.
    X_new, y_new = make_elastic_stress_random(
        n_samples=[1] * len(grain_size), seed=3, **common)
    y_result = model.predict(X_new)
    assert np.allclose(np.round(y_new, decimals=2),
                       np.round(y_result, decimals=2))
def test_intercept_setter():
    """The ``intercept_`` property setter should round-trip a new value."""
    from pymks import MKSHomogenizationModel, PrimitiveBasis

    model = MKSHomogenizationModel(basis=PrimitiveBasis(2))
    X = np.random.randint(2, size=(50, 10, 10))
    y = np.random.randint(2, size=(50,))
    model.fit(X, y)
    # Double the fitted intercept through the setter and read it back.
    doubled = model.intercept_ * 2
    model.intercept_ = doubled
    assert np.allclose(model.intercept_, doubled)
def test_coef_setter():
    """The ``coef_`` property setter should round-trip a new value."""
    from pymks import MKSHomogenizationModel, PrimitiveBasis

    model = MKSHomogenizationModel(basis=PrimitiveBasis(2))
    X = np.random.randint(2, size=(50, 10, 10))
    y = np.random.randint(2, size=(50,))
    model.fit(X, y)
    # Double the fitted coefficients through the setter and read them back.
    doubled = model.coef_ * 2
    model.coef_ = doubled
    assert np.allclose(model.coef_, doubled)
def test_intercept_setter():
    """Assigning to ``intercept_`` must be reflected on read-back."""
    from pymks import MKSHomogenizationModel
    from pymks import PrimitiveBasis

    p_basis = PrimitiveBasis(2)
    model = MKSHomogenizationModel(basis=p_basis)
    model.fit(np.random.randint(2, size=(50, 10, 10)),
              np.random.randint(2, size=(50,)))
    original = model.intercept_
    # Write a scaled copy back through the property setter.
    model.intercept_ = original * 2
    assert np.allclose(model.intercept_, original * 2)
def test_coef_setter():
    """Assigning to ``coef_`` must be reflected on read-back."""
    from pymks import MKSHomogenizationModel
    from pymks import PrimitiveBasis

    p_basis = PrimitiveBasis(2)
    model = MKSHomogenizationModel(basis=p_basis)
    model.fit(np.random.randint(2, size=(50, 10, 10)),
              np.random.randint(2, size=(50,)))
    original = model.coef_
    # Write a scaled copy back through the property setter.
    model.coef_ = original * 2
    assert np.allclose(model.coef_, original * 2)
def plot_components(x, y, n_comps, linker_model, verbose=2):
    """Fit a 5-component homogenization model with *linker_model* as the
    property linker, print its linkage coefficients, and scatter-plot the
    first two principal components for each sample group.

    Parameters
    ----------
    x, y : training microstructures and their responses.
    n_comps, verbose : accepted but unused — kept for interface
        compatibility. NOTE(review): confirm with callers before removal.
    linker_model : regression model handed to MKSHomogenizationModel.
    """
    prim_basis = PrimitiveBasis(n_states=3, domain=[0, 2])
    model = MKSHomogenizationModel(basis=prim_basis,
                                   property_linker=linker_model)
    model.n_components = 5
    model.fit(x, y, periodic_axes=[0, 1])
    # print() form works under both Python 2 and 3 (original used the
    # Python-2-only print statement).
    print(model.property_linker.coef_)
    # Slice the reduced data into the eight sample groups and plot the
    # first two principal components of each group.
    groups = [model.reduced_fit_data[0:3, :2],
              model.reduced_fit_data[3:6, :2],
              model.reduced_fit_data[6:9, :2],
              model.reduced_fit_data[9:11, :2],
              model.reduced_fit_data[11:14, :2],
              model.reduced_fit_data[14:16, :2],
              model.reduced_fit_data[16:17, :2],
              model.reduced_fit_data[17:18, :2]]
    labels = ['Ag:0.237 Cu:0.141 v:0.0525',
              'Ag:0.237 Cu:0.141 v:0.0593',
              'Ag:0.237 Cu:0.141 v:0.0773',
              'Ag:0.237 Cu:0.141 v:0.0844',
              'Ag:0.239 Cu:0.138 v:0.0791',
              'Ag:0.239 Cu:0.138 v:0.0525',
              'Ag:0.237 Cu:0.141 v:0.0914',
              'Ag:0.237 Cu:0.141 v:0.0512']
    draw_components(groups, labels)
# Get a representative slice from the block (or ave or whatever we decide on).
# NOTE(review): `metadatum` is referenced before the loop below — this chunk
# presumably sits inside an enclosing `for metadatum in metadata:` loop that
# starts outside this view; confirm against the full file.
best_slice = get_best_slice(metadatum['data'])

# Get 2-pt stats for the best slice.  print() form works under both
# Python 2 and 3 (original used the Python-2-only print statement).
print("--->Getting 2pt stats")
metadatum['stats'] = get_correlations_for_slice(best_slice)
print(metadata[0]['stats'].shape)

# Construct X and Y for PCA and linkage.
print("-->Creating X and Y")
i = 0
for metadatum in metadata:
    # TODO(review): `i` is never incremented, so every sample writes into
    # row 0 of `x` — looks like a bug; left as-is pending confirmation.
    x[i, 0:6 * metadatum['x'] ** 2] = metadatum['stats'].flatten()

prim_basis = PrimitiveBasis(n_states=3, domain=[0, 2])
x_ = prim_basis.discretize(metadata[0]['data'])
x_corr = correlate(x_)
draw_correlations(x_corr.real)
quit()

# Reduce all 2-pt stats via PCA, then try linear regression on the
# reduced inputs and outputs.  (Dead code: `quit()` above exits first.)
reducer = PCA(n_components=3)
linker = LinearRegression()
model = MKSHomogenizationModel(dimension_reducer=reducer,
                               property_linker=linker,
                               compute_correlations=False)
model.n_components = 40
model.fit(metadatum['stats'], y, periodic_axes=[0, 1])
print(model.reduced_fit_data)
# Optimizing polynomial degree and number of components.
# NOTE(review): the GridSearchCV block below was left commented out in the
# original; the chosen hyper-parameters are hard-coded instead.
'''params_to_tune = {'degree': np.arange(1, 4),
                     'n_components': np.arange(1, 8)}
fit_params = {'size': dataset[0].shape, 'periodic_axes': [0, 1]}
gs = GridSearchCV(model, params_to_tune, cv=3, n_jobs=3,
                  fit_params=fit_params).fit(data_train, stress_train)'''
# model = gs.best_estimator_
model.n_components = 4
model.degree = 2
# Original `print('Components'), (model.n_components)` only printed the
# value under Python 2 and discarded it under Python 3; print both
# explicitly.  "Polynomail" typo in the label fixed.
print('Components', model.n_components)
print('Polynomial Order', model.degree)

# Fit data to model.
model.fit(dataset, stresses, periodic_axes=[0, 1])
shapes = (data_test.shape[0],) + (dataset.shape[1:])
print(shapes)
data_test = data_test.reshape(shapes)
stress_predict = model.predict(data_test, periodic_axes=[0, 1])
labels = 'Long X', 'Short X', 'Long Y', 'Short Y'

# Draw PCA plot of training vs. testing samples in component space.
draw_components([model.reduced_fit_data[:, :2],
                 model.reduced_predict_data[:, :2]],
                ['Training Data', 'Testing Data'])
# # Draw goodness of fit