def resp(ns, strt, name, set_id, bc, newdir):

    start = time.time()

    C = const()

    f = h5py.File("responses.hdf5", 'a')

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    fname = "%s_%s.txt" % (name, bc)
    tmp = np.loadtxt(fname)

    dset_name = "yield_%s_%s" % (bc, set_id)
    f.create_dataset(dset_name, data=tmp[strt:strt+ns, 2])

    dset_name = "stiffness_%s_%s" % (bc, set_id)
    f.create_dataset(dset_name, data=tmp[strt:strt+ns, 5])

    # return to the original directory
    os.chdir('..')

    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'responses read from .txt file for %s_%s: %s seconds' \
          % (name, bc, timeE)
    rr.WP(msg, C['wrt_file'])
def combine():

    C = constants.const()
    filename = 'log_combine_coef.txt'

    """Combine the results of the coefficient determination"""

    # coef is the combined vector of coefficients as calculated by the
    # orthogonal regression
    coef = np.zeros((C['cmax'], 10), dtype='complex128')

    c = 0
    # for tnum in xrange(596):
    for tnum in xrange(C['integrate_njobs']):

        fn.WP(str(tnum), filename)

        # load partially filled coefficient arrays from each file
        f = h5py.File(C['integrate_output'] % str(tnum).zfill(5), 'r')
        coef_prt = f.get('coef_prt')[...]
        f.close()

        clen = coef_prt.shape[0]
        coef[c:c + clen, :] = coef_prt
        c += clen

    # save the coefficients file
    f = h5py.File(C['combinecoef_coef'], 'w')
    f.create_dataset('coef', data=coef)
    f.close()
def read_euler(strt, ns, set_id, newdir, funit):

    start = time.time()

    C = const()

    euler = np.zeros([ns, 3, C['el']**3])

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    for ii in xrange(ns):
        sn = strt + ii + 1
        filename = "Ti64_Dream3D_v01_Output_%s.vtk" % sn
        euler[ii, :, :] = rr.read_vtk_vector(filename=filename)

    if funit == 1:
        euler = euler * (np.pi / 180.)

    # return to the original directory
    os.chdir('..')

    f = h5py.File("spatial.hdf5", 'a')
    dset_name = 'euler_%s' % set_id
    f.create_dataset(dset_name, data=euler)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'euler angles read from .vtk file for %s: %s seconds' \
          % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
def fegrab(ns, strt, set_id, direc):
    # FINITE ELEMENT RESPONSES

    st = time.time()

    C = const()

    r_fem = np.zeros([ns, C['el']**3])

    # nwd = os.getcwd() + '\\' + direc
    nwd = os.getcwd() + '/' + direc  # for unix
    os.chdir(nwd)

    for ii in xrange(ns):
        sn = strt + ii + 1
        filename = 'sq21_50test_%s.dat' % sn
        r_fem[ii, ...] = res_red(filename)

    r_fem = r_fem.reshape(ns, C['el'], C['el'], C['el'])

    os.chdir('..')

    f = h5py.File('responses.hdf5', 'a')
    f.create_dataset('y_sim_%s' % set_id, data=r_fem)
    f.close()

    msg = 'Load FE results from .dat files for set %s%s: %s seconds' \
          % (ns, set_id, np.round((time.time() - st), 3))
    rr.WP(msg, C['wrt_file'])
def combine():

    C = constants.const()
    filename = 'log_Xcalc_combine.txt'

    f_master = h5py.File(C['combineXcalc_output'], 'w')

    """load the cosine basis evaluations"""
    f_cos = h5py.File(C['Xcalccos_output'], 'r')

    for name in f_cos.keys():
        fn.WP(name, filename)
        tmp = f_cos.get(name)[...]
        f_master.create_dataset(name, data=tmp)
        del tmp

    f_cos.close()

    """load the GSH basis evaluations"""
    for jobnum in xrange(C['XcalcGSH_njobs']):

        f_gsh = h5py.File(C['XcalcGSH_output'] % str(jobnum).zfill(5), 'r')

        for name in f_gsh.keys():
            fn.WP(name, filename)
            tmp = f_gsh.get(name)[...]
            f_master.create_dataset(name, data=tmp)
            del tmp

        f_gsh.close()

    f_master.close()
def regress(ns, set_id):

    st = time.time()

    C = const()

    """load the feature data"""
    f = h5py.File("pre_regress_%s.hdf5" % set_id, 'r')
    X = f.get('X')[...]
    f.close()

    """load the dependent variable data"""
    f = h5py.File("responses.hdf5", 'r')
    y = f.get('fip_%s' % set_id)[...]
    y = y.reshape((C['n_samp']))
    f.close()

    # clf = svm.SVR()
    # clf = neighbors.KNeighborsRegressor(n_neighbors=1, weights='uniform')
    clf = tree.DecisionTreeRegressor(max_depth=10)
    clf.fit(X, y)

    joblib.dump(clf, 'modelfit.pkl')

    timeE = np.round(time.time() - st, 1)
    msg = "fit completed: %s s" % timeE
    rr.WP(msg, C['wrt_file'])
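# A minimal round-trip sketch (hypothetical data, not part of the pipeline)
# for the model persisted by regress() above; note the joblib import path
# depends on the sklearn version (newer versions use a standalone joblib).
import numpy as np
from sklearn import tree
from sklearn.externals import joblib

X = np.random.rand(50, 4)
y = np.random.rand(50)
clf = tree.DecisionTreeRegressor(max_depth=10).fit(X, y)
joblib.dump(clf, 'modelfit.pkl')
clf2 = joblib.load('modelfit.pkl')
assert np.allclose(clf.predict(X), clf2.predict(X))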
def calculate():

    C = constants.const()

    filename = 'Xcalc_log_cos.txt'

    """ Load info from collected simulation info file """
    f = h5py.File(C['combineread_output'], 'r')
    var_set = f.get('var_set')

    theta = np.sort(np.unique(var_set[:, 0]))
    et_norm = np.sort(np.unique(var_set[:, 4]))
    f.close()

    f = h5py.File(C['basis_eval_cos'], 'a')

    """Evaluate the cosine basis functions for theta"""
    for q in xrange(C['N_q']):
        vec = np.cos(q * np.pi * theta / C['L_th'])
        set_id = 'q_%s' % str(q).zfill(5)
        f.create_dataset(set_id, data=vec)
        fn.WP(set_id, filename)

    """Evaluate the cosine basis functions for en"""
    for r in xrange(C['N_r']):
        vec = np.cos(r * np.pi * (et_norm - C['a']) / C['L_en'])
        set_id = 'r_%s' % str(r).zfill(5)
        f.create_dataset(set_id, data=vec)
        fn.WP(set_id, filename)

    f.close()
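# A minimal sketch (toy sizes) of the property that makes the cosine basis
# above useful: the functions cos(q*pi*theta/L) are mutually orthogonal on
# [0, L], so basis coefficients can later be recovered by integration.
import numpy as np

L = np.pi / 3.
theta = np.linspace(0, L, 1000)
b2 = np.cos(2 * np.pi * theta / L)
b3 = np.cos(3 * np.pi * theta / L)
print(np.trapz(b2 * b3, theta))  # ~0 for distinct basis indices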
def read_fip(ns, set_id, newdir):

    start = time.time()

    C = const()

    fip = np.zeros([ns, C['el']**3])

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % C['step']):
            fip[sn, :] = rr.read_vtk_scalar(filename=filename)
            sn += 1

    """return to the original directory"""
    os.chdir('..')

    f = h5py.File("responses.hdf5", 'a')
    f.create_dataset('fip_%s' % set_id, data=(1e9) * fip)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'fip values read from .vtk file for %s: %s seconds' % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
def read_euler(ns, set_id, newdir, funit):

    start = time.time()

    C = const()

    euler = np.zeros([ns, 3, C['el']**3])

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % C['step']):
            euler[sn, :, :] = rr.read_vtk_vector(filename=filename)
            sn += 1

    if funit == 1:
        euler = euler * (np.pi / 180.)

    """return to the original directory"""
    os.chdir('..')

    f = h5py.File("spatial.hdf5", 'a')
    f.create_dataset('euler_%s' % set_id, data=euler)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'euler angles read from .vtk file for %s: %s seconds' \
          % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
def pltcorr(ns, set_id, step, sn, iA, iB):

    C = const()

    f = h5py.File("spatial.hdf5", 'r')
    dset_name = 'euler_%s' % set_id
    euler = f.get(dset_name)[sn, 0, :].reshape(C['el'], C['el'], C['el'])
    corr = f.get('ff_%s' % set_id)[sn, iA, iB, ...]
    f.close()

    corr_centered = np.fft.fftshift(corr)

    """Plot slices of the response"""
    plt.figure(num=1, figsize=[8, 2.7])

    plt.subplot(121)
    ax = plt.imshow(euler[0, :, :], origin='lower',
                    interpolation='none', cmap='magma')
    plt.colorbar(ax)
    plt.title('phi1 field')

    plt.subplot(122)
    ax = plt.imshow(corr_centered[10, :, :], origin='lower',
                    interpolation='none', cmap='viridis')
    plt.colorbar(ax)
    plt.title('ff: %s, %s' % (iA, iB))

    plt.show()
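# np.fft.fftshift above moves the zero-shift correlation to the center of the
# array so the plotted slice is centered. Minimal 1D sketch:
import numpy as np

print(np.fft.fftshift(np.arange(5)))  # [3 4 0 1 2]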
def calculate():

    C = constants.const()

    filename = 'Xcalc_log_cos.txt'

    """ Load info from collected simulation info file """
    f = h5py.File(C['combineread_output'], 'r')
    var_set = f.get('var_set')

    theta = np.sort(np.unique(var_set[:, 0]))
    msg = "theta vec: %s" % str(theta * (180 / np.pi))
    fn.WP(msg, filename)

    f.close()

    f = h5py.File(C['basiscos_output'], 'a')

    """Evaluate the cosine basis functions for theta"""
    for q in xrange(C['N_q']):
        vec = np.cos(q * np.pi * theta / C['L_th'])
        set_id = 'q_%s' % str(q).zfill(5)
        f.create_dataset(set_id, data=vec)
        fn.WP(set_id, filename)

    f.close()
def combine():

    C = constants.const()

    f1 = h5py.File(C['combineread_output'], 'w')
    alldata = f1.create_dataset("var_set", (C['n_eul'] * C['n_th'], 14))

    c = 0
    for tt in xrange(C['n_th']):

        print "Deformation Mode: %s deg" % str((tt + 0.5) * C['inc'])

        # load the pre-database outputs for this deformation mode
        f2 = h5py.File(C['read_output'] % str(tt).zfill(5), 'r')
        ep_tmp = f2.get("var_set")

        stt = c * C['n_eul']
        print "start index: %s" % stt
        end = (c + 1) * C['n_eul']
        print "end index: %s" % end

        alldata[stt:end, :] = ep_tmp

        f2.close()
        c += 1

    print alldata.shape

    f1.close()
def correlate(ns, set_id):

    st = time.time()

    C = const()

    f = h5py.File("spatial.hdf5", 'a')
    M = f.get('M_%s' % set_id)[...]
    ff = f.create_dataset("ff_%s" % set_id,
                          (ns, C['H'], C['H'], C['el'], C['el'], C['el']),
                          dtype='float64')

    S = C['el']**3

    cmax = C['H'] * C['H']
    cmat = np.unravel_index(np.arange(cmax), [C['H'], C['H']])
    cmat = np.array(cmat).T

    for c in xrange(cmax):

        ii, jj = cmat[c, :]
        if np.mod(c, 20) == 0:
            print str([ii, jj])

        M1 = M[:, ii, ...]
        mag1 = np.abs(M1)
        ang1 = np.arctan2(M1.imag, M1.real)
        exp1 = np.exp(-1j * ang1)
        term1 = mag1 * exp1
        del M1, mag1, ang1, exp1

        M2 = M[:, jj, ...]
        mag2 = np.abs(M2)
        ang2 = np.arctan2(M2.imag, M2.real)
        exp2 = np.exp(1j * ang2)
        term2 = mag2 * exp2
        del M2, mag2, ang2, exp2

        FFtmp = term1 * term2 / S
        del term1, term2

        tmp = np.fft.ifftn(FFtmp, [C['el'], C['el'], C['el']], [1, 2, 3])
        ff[:, ii, jj, ...] = tmp.real

        if c == 0:
            szgb = np.round(C['H'] * C['H'] * FFtmp.nbytes / (1e9), 3)
            msg = "ff = %s gb" % szgb
            rr.WP(msg, C['wrt_file'])

    f.close()

    timeE = np.round(time.time() - st, 5)
    msg = "correlations computed for %s: %ss" % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
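# A minimal sketch (toy sizes, hypothetical names) of the identity exploited
# by correlate() above: term1 is the complex conjugate of one FFT, so the
# conjugate product divided by the voxel count S, inverse transformed, gives
# the periodic cross-correlation, i.e. the volume average of a(x)*b(x+t).
import numpy as np

el = 5
a = np.random.rand(el, el, el)
b = np.random.rand(el, el, el)

A = np.fft.fftn(a)
B = np.fft.fftn(b)
ff = np.fft.ifftn(np.conj(A) * B / a.size).real  # frequency-space route

t = (1, 2, 3)  # an arbitrary shift
shifted = np.roll(np.roll(np.roll(b, -t[0], 0), -t[1], 1), -t[2], 2)
direct = np.mean(a * shifted)  # direct volume average at shift t
assert np.allclose(ff[t], direct)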
def new_space(ns_set, set_id_set):

    st = time.time()

    C = const()
    print "H: %s" % C['H']

    n_corr = C['H']**2
    ns_tot = np.sum(ns_set)

    allcorr = np.zeros((ns_tot, n_corr * C['el']**3), dtype='float64')

    f_stats = h5py.File("spatial.hdf5", 'a')

    """here we will treat the real and imaginary parts of ff as separate
    dimensions prior to applying PCA"""
    c = 0
    for ii in xrange(len(set_id_set)):
        tmp = f_stats.get('ff_%s' % set_id_set[ii])[...]
        ff = tmp.reshape(ns_set[ii], n_corr * C['el']**3)
        allcorr[c:c + ns_set[ii], ...] = ff
        c += ns_set[ii]

    f_stats.close()

    msg = "correlations combined"
    rr.WP(msg, C['wrt_file'])

    f_master = h5py.File("pca_data.hdf5", 'w')

    """Note that when whiten=True the information about the relative
    variances of the pc vectors is discarded. This may be desirable when
    using regression to find a linkage as it reduces some numerical
    issues"""
    pca = PCA(n_components=C['n_pc_tot'], whiten=True)
    pca.fit(allcorr)

    ratios = 100 * pca.explained_variance_ratio_
    f_master.create_dataset('ratios', data=ratios)

    ratios = np.round(ratios, 1)
    msg = "pca explained variance: %s%%" % str(ratios)
    rr.WP(msg, C['wrt_file'])

    f_master.close()

    msg = "PCA completed: %ss" % np.round(time.time() - st, 5)
    rr.WP(msg, C['wrt_file'])

    return pca
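# A minimal sketch (hypothetical sizes) of the whiten=True behavior noted
# above: the transformed scores come out with roughly unit variance in every
# retained pc dimension, so no single pc dominates a subsequent regression.
import numpy as np
from sklearn.decomposition import PCA

X = np.random.rand(100, 20)
scores = PCA(n_components=5, whiten=True).fit_transform(X)
print(scores.std(axis=0))  # each entry close to 1.0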
def calculate():

    C = constants.const()

    filename = 'Xcalc_log_cos.txt'

    """ Load info from collected simulation info file """
    f = h5py.File(C['combineread_output'], 'r')
    var_set = f.get('var_set')

    theta = var_set[:, 0]
    et_norm = var_set[:, 4]
    f.close()

    f = h5py.File(C['Xcalccos_output'], 'a')

    """Evaluate the cosine basis functions for theta"""
    st = time.time()

    for q in xrange(C['N_q']):
        vec = np.cos(q * np.pi * theta / C['L_th'])
        set_id = 'q_%s' % str(q).zfill(5)
        f.create_dataset(set_id, data=vec)
        fn.WP(set_id, filename)

    msg = "Cosine basis evaluation for theta complete: %ss" \
          % np.round(time.time() - st, 3)
    fn.WP(msg, filename)

    """Evaluate the cosine basis functions for en"""
    st = time.time()

    for r in xrange(C['N_r']):
        vec = np.cos(r * np.pi * (et_norm - C['a']) / C['L_en'])
        set_id = 'r_%s' % str(r).zfill(5)
        f.create_dataset(set_id, data=vec)
        fn.WP(set_id, filename)

    msg = "Cosine basis evaluation for en complete: %ss" \
          % np.round(time.time() - st, 3)
    fn.WP(msg, filename)

    f.close()
def read_meas(ns, set_id, comp, tensor_id, newdir):

    start = time.time()

    C = const()

    typ = ['sigma', 'epsilon_t', 'epsilon_p']

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    compd = {'11': 0, '22': 4, '33': 8, '12': 1, '13': 6, '23': 5}
    compp = compd[comp]

    r_fem = np.zeros([ns, C['el'], C['el'], C['el']])

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % C['step']):
            r_temp = rr.read_vtk_tensor(filename=filename,
                                        tensor_id=tensor_id,
                                        comp=compp)
            r_fem[sn, ...] = r_temp.reshape([C['el'], C['el'], C['el']])
            sn += 1

    """return to the original directory"""
    os.chdir('..')

    f = h5py.File("responses.hdf5", 'a')
    f.create_dataset('%s_%s' % (typ[tensor_id], set_id), data=r_fem)
    f.close()

    # """FFT OF RESPONSE FIELD"""
    # f = h5py.File("D_%s%s_s%s.hdf5" % (ns, set_id, step), 'a')
    # tmp = np.fft.fftn(r_fem, axes=[1, 2, 3])
    # print tmp.shape
    # f.create_dataset('rfft%s_%s' % (comp, typ[tensor_id]), data=tmp)
    # f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'The measure of interest has been read from .vtk file' \
          ' for %s, component %s, type %s: %s seconds' \
          % (set_id, comp, typ[tensor_id], timeE)
    rr.WP(msg, C['wrt_file'])
def features(ns, set_id):

    st = time.time()

    C = const()

    """gather the independent variable data"""
    f = h5py.File("spatial.hdf5", 'r')
    neig = f.get('neig_%s' % set_id)[...]
    neig = neig.reshape((C['n_samp'], C['H'], C['cmax']))
    f.close()

    """calculate the X matrix"""
    X = np.zeros((C['n_samp'], C['xmax']), dtype='float64')

    c = 0  # keep track of position in X

    """for 0th order polynomial"""
    X[:, 0] = 1
    c += 1

    """for 1st order polynomial"""
    Imax = C['H'] * C['cmax']
    Imat = np.unravel_index(np.arange(Imax), (C['H'], C['cmax']))
    Imat = np.array(Imat).T

    for I in xrange(Imax):
        h, pos = Imat[I, :]
        X[:, c] = neig[:, h, pos]
        c += 1

    """for 2nd order polynomial"""
    Imax = C['H'] * C['cmax']**2
    Imat = np.unravel_index(np.arange(Imax),
                            (C['H'], C['cmax'], C['cmax']))
    Imat = np.array(Imat).T

    for I in xrange(Imax):
        h, pos1, pos2 = Imat[I, :]
        X[:, c] = neig[:, h, pos1] * neig[:, h, pos2]
        c += 1

    f = h5py.File("pre_regress_%s.hdf5" % set_id, 'w')
    f.create_dataset('X', data=X)
    f.close()

    timeE = np.round(time.time() - st, 1)
    msg = "features extracted for %s: %s s" % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
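# A quick consistency check (toy sizes) for the feature layout built above:
# one constant column, H*cmax first-order columns, and H*cmax**2 second-order
# products, so C['xmax'] must equal 1 + H*cmax + H*cmax**2 for the running
# index c to fill X exactly.
H, cmax = 2, 3
xmax = 1 + H * cmax + H * cmax**2
print(xmax)  # 25 for these toy sizes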
def res_red(filename):
    """
    Summary: This function reads the E11 values from a .dat file and
        reorganizes the data into an el x el x el array with the correct
        organization. It will also plot a certain x-slice in the dataset
        if called within this script.
    Inputs:
        filename (string): the name of the '.dat' file containing the FEM
            response
    Outputs:
        r_mat ([el, el, el], float): the FEM response of the '.dat' file of
            interest
    """

    C = const()

    f = open(filename, "r")
    linelist = f.readlines()

    # find a location several lines above the start of the data;
    # linelist[n] reads the entire line at location n
    for ln in xrange(1000):
        if 'THE FOLLOWING TABLE' in linelist[ln]:
            break

    # line0 is the index of the first line of the data
    line0 = ln + 5

    r_mat = np.zeros([C['el']**3, 8])
    c = -1

    # this series of loops generates an (el**3 x 8) dataset of E11s
    # (element x integration point)
    for k in xrange(C['el']**3):
        for jj in xrange(8):
            c += 1
            r_mat[k, jj] = linelist[line0 + c].split()[2]

    f.close()

    # here we average all 8 integration points in each element cell
    r_mat = np.mean(r_mat, axis=1)

    return r_mat
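# A minimal sketch (toy values) of the reduction step at the end of
# res_red(): the eight integration-point values per element collapse to a
# single element response via a mean over axis 1.
import numpy as np

r_mat = np.arange(24, dtype='float64').reshape(3, 8)  # 3 elements x 8 points
print(np.mean(r_mat, axis=1))  # one averaged value per element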
def get_M(ns, set_id):

    start = time.time()

    C = const()

    """get the euler angle files"""
    f = h5py.File("spatial.hdf5", 'a')
    euler = f.get('euler_%s' % set_id)[...]

    mf = np.zeros([ns, C['H'], C['el']**3], dtype='float64')

    c = 0
    for h in xrange(C['H']):
        tmp = gsh.gsh_eval(euler.swapaxes(1, 2), [h])
        tmp = np.squeeze(tmp)
        mf[:, c, :] = tmp.real
        c += 1
        # mf[:, h, :] = (2*indxvec[h, 0]+1)*tmp  # 2*l+1 included in maple generator

    end = time.time()
    timeE = np.round((end - start), 3)
    msg = "Conversion from Euler angles to GSH coefficients completed:" + \
          " %s seconds" % timeE
    rr.WP(msg, C['wrt_file'])

    mf = mf.reshape([ns, C['H'], C['el'], C['el'], C['el']])

    # MICROSTRUCTURE FUNCTIONS IN FREQUENCY SPACE
    start = time.time()

    M = np.fft.fftn(mf, axes=[2, 3, 4])
    del mf

    f.create_dataset('M_%s' % set_id, data=M)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)
    msg = "FFT3 conversion of mf to M for %s: %s seconds" % \
          (set_id, timeE)
    rr.WP(msg, C['wrt_file'])

    msg = 'Size of M: %s gb' % str(M.nbytes / (1e9))
    rr.WP(msg, C['wrt_file'])
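# A minimal sketch (toy sizes) of the axis-restricted FFT used in get_M():
# passing axes=[2, 3, 4] transforms only the three spatial dimensions,
# leaving the sample and GSH-basis axes untouched.
import numpy as np

mf = np.random.rand(2, 4, 5, 5, 5)   # (sample, basis, x, y, z)
M = np.fft.fftn(mf, axes=[2, 3, 4])
print(M.shape)  # (2, 4, 5, 5, 5): shape preserved, spatial axes transformed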
def pltcheck(ns, set_id):

    C = const()

    """load the simulated and predicted responses"""
    f = h5py.File("responses.hdf5", 'r')
    r_sim = f.get('fip_%s' % set_id)[...]
    r_sim = r_sim.reshape((C['n_samp']))
    f.close()

    f = h5py.File('validation_%s.hdf5' % set_id, 'r')
    r_fit = f.get('r_fit')[...]
    f.close()

    r_sim = np.exp(r_sim)
    r_fit = np.exp(r_fit)

    """plot the prediction equal to simulation line"""
    plt.figure(num=6, figsize=[9, 8.5])

    minval = np.min([r_sim])
    maxval = np.max([r_sim])

    valrange = maxval - minval
    minval_ = minval - 0.5 * valrange
    maxval_ = maxval + 0.5 * valrange
    line = np.array([minval_, maxval_])

    plt.plot(line, line, 'k-')

    plt.plot(r_sim, r_fit,
             marker='o', markersize=7,
             color=[.7, .1, .1], linestyle='')

    minval_ = minval - 0.1 * valrange
    maxval_ = maxval + 0.1 * valrange

    plt.axis([minval_, maxval_, minval_, maxval_])

    plt.title("predicted versus simulated ln(FIP)")
    plt.xlabel("simulation")
    plt.ylabel("prediction")

    plt.xticks(rotation=20)
    plt.yticks(rotation=20)

    plt.show()
def validate(ns, set_id):

    st = time.time()

    C = const()

    """load the feature data"""
    f = h5py.File("pre_regress_%s.hdf5" % set_id, 'r')
    X = f.get('X')[...]
    f.close()

    """gather the dependent variable data"""
    f = h5py.File("responses.hdf5", 'r')
    r_sim = f.get('fip_%s' % set_id)[...]
    r_sim = r_sim.reshape((C['n_samp']))
    f.close()

    """retrieve the coefficient set from the regression"""
    f = h5py.File("regress_results.hdf5", 'r')
    coef = f.get('coef')[...]
    f.close()

    """evaluate the fit response"""
    r_fit = np.dot(coef, X.T)

    f = h5py.File('validation_%s.hdf5' % set_id, 'w')
    f.create_dataset('r_fit', data=r_fit)
    f.close()

    """evaluate error metrics"""
    err = np.abs(r_sim - r_fit)
    err_mean = err.mean()
    err_max = err.max()

    r_sim_mmm = np.array([r_sim.min(), r_sim.mean(), r_sim.max()])
    r_fit_mmm = np.array([r_fit.min(), r_fit.mean(), r_fit.max()])

    print "r_sim min, mean and max: %s" % str(r_sim_mmm)
    print "r_fit min, mean and max: %s" % str(r_fit_mmm)
    print "mean error: %s" % err_mean
    print "max error: %s" % err_max

    timeE = np.round(time.time() - st, 1)
    msg = "validation completed for %s: %s s" % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
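# A minimal sketch (toy sizes) of the prediction step in validate(): with X
# of shape (n_samp, xmax) and coef of shape (xmax,), np.dot(coef, X.T) is the
# fitted-response vector, identical to X.dot(coef).
import numpy as np

X = np.random.rand(4, 3)
coef = np.random.rand(3)
assert np.allclose(np.dot(coef, X.T), X.dot(coef))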
def pltevd(sn, set_id):

    C = const()

    """get the x, y data for plotting the evd"""
    f = h5py.File("raw_responses.hdf5", 'r')
    rawfip = f.get('fip_%s' % set_id)

    x = np.sort(rawfip[sn, :])
    x = x[np.int64(C['pcnt'] * x.size):, None]
    y = (np.arange(x.size) + 1) / np.float32(x.size)
    f.close()

    """retrieve the coefficients"""
    f = h5py.File("responses.hdf5", 'a')
    c0 = f.get('c0_%s' % set_id)[sn]
    c1 = f.get('c1_%s' % set_id)[sn]
    c2 = f.get('c2_%s' % set_id)[sn]
    f.close()

    """plot the original data and the fits"""
    plt.figure()

    plt.plot(np.log(x), y, 'b.', markersize=3)

    tmp = np.linspace(np.log(x).min(), np.log(x).max(), 100)
    x_ = np.exp(tmp)
    plt.plot(np.log(x_), ss.gamma.cdf(x_, c0, loc=c1, scale=c2),
             'r-', lw=2, label='gamma cdf')

    ymin = y.min()
    ymax = y.max()
    rng = ymax - ymin
    ymin += -0.1 * rng
    ymax += 0.1 * rng
    plt.ylim((ymin, ymax))

    plt.show()
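# The coefficients c0/c1/c2 read above are consumed as the (shape, loc,
# scale) arguments of scipy's gamma cdf. A minimal sketch, assuming they were
# produced by a maximum-likelihood fit (synthetic data, hypothetical values):
import scipy.stats as ss

x = ss.gamma.rvs(2.0, loc=0.5, scale=1.5, size=2000)
c0, c1, c2 = ss.gamma.fit(x)  # returns (shape, loc, scale)
print(c0, c1, c2)  # close to the generating parameters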
def read(ns, strt, set_id, newdir):

    start = time.time()

    C = const()

    tmp = np.loadtxt("micr.txt", dtype='int16', delimiter=',').T
    micr = tmp[strt:strt + ns, :]

    f = h5py.File("spatial.hdf5", 'a')
    f.create_dataset('micr_%s' % set_id, data=micr)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'microstructures read from .txt file for %s: %s seconds' \
          % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
def get_mf(ns, set_id):

    st = time.time()

    C = const()

    """get the microstructure files"""
    f = h5py.File("spatial.hdf5", 'a')
    micr = f.get('micr_%s' % set_id)[...]

    mf = np.zeros([ns, C['H'], C['el']**3], dtype='int16')

    for h in xrange(C['H']):
        mf[:, h, :] = micr == h

    f.create_dataset('mf_%s' % set_id, data=mf)
    f.close()

    end = time.time()
    timeE = np.round((end - st), 3)
    msg = "mf calculated: %s seconds" % timeE
    rr.WP(msg, C['wrt_file'])
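# A minimal sketch (toy sizes) of the indicator-basis microstructure function
# built above: mf[:, h, :] is 1 where a voxel has local state h and 0
# elsewhere, so the H slices sum to one at every voxel.
import numpy as np

H = 3
micr = np.random.randint(H, size=(2, 8))  # 2 samples, 8 voxels
mf = np.zeros((2, H, 8), dtype='int16')
for h in range(H):
    mf[:, h, :] = micr == h
assert (mf.sum(axis=1) == 1).all()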
def neighbors(ns, set_id):

    st = time.time()

    C = const()

    f = h5py.File("spatial.hdf5", 'a')
    mf = f.get('mf_%s' % set_id)[...]
    mf = mf.swapaxes(1, 2)
    mf = mf.reshape((ns, C['el'], C['el'], C['el'], C['H']))

    exth = np.floor(0.5 * C['ext'])

    cvec = np.arange(C['cmax'])
    cmat = np.unravel_index(cvec, (C['ext'], C['ext'], C['ext']))
    cmat = np.array(cmat).T

    neig = np.zeros((ns, C['el'], C['el'], C['el'], C['H'], C['cmax']),
                    dtype='int16')

    for cc in cvec:
        ii, jj, kk = cmat[cc, :]
        inx = np.int16(ii - exth)
        iny = np.int16(jj - exth)
        inz = np.int16(kk - exth)

        tmp = np.roll(mf, inx, 1)
        tmp = np.roll(tmp, iny, 2)
        tmp = np.roll(tmp, inz, 3)

        neig[..., cc] = tmp

    neig = neig.reshape((ns, C['el']**3, C['H'], C['cmax']))

    neig = f.create_dataset('neig_%s' % set_id, data=neig)
    f.close()

    timeE = np.round(time.time() - st, 5)
    msg = "neighbors found for %s: %ss" % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
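# A minimal 1D sketch of the periodic-shift trick used in neighbors():
# np.roll aligns each voxel with a neighbor at a fixed relative position
# under periodic boundary conditions, so neig[..., cc] stacks neighborhoods.
import numpy as np

v = np.arange(5)
print(np.roll(v, 1))   # [4 0 1 2 3]: index i now holds v[i-1]
print(np.roll(v, -1))  # [1 2 3 4 0]: index i now holds v[i+1]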
def get_mf(ns, set_id):

    st = time.time()

    C = const()

    """get the euler angle files"""
    f = h5py.File("spatial.hdf5", 'a')
    euler = f.get('euler_%s' % set_id)[...]

    mf = np.zeros([ns, C['H'], C['el']**3], dtype='float64')

    for h in xrange(C['H']):
        tmp = gsh.gsh_eval(euler.swapaxes(1, 2), [h])
        tmp = np.squeeze(tmp)
        # take the real part explicitly since mf is real-valued
        # (2*l+1 included in maple generator)
        mf[:, h, :] = tmp.real

    f.create_dataset('mf_%s' % set_id, data=mf)
    f.close()

    end = time.time()
    timeE = np.round((end - st), 3)
    msg = "mf calculated: %s seconds" % timeE
    rr.WP(msg, C['wrt_file'])
def transform(ns, set_id, pca):

    st = time.time()

    C = const()

    n_corr = C['H']**2

    f_red = h5py.File("spatial_reduced.hdf5", 'a')
    f_stats = h5py.File("spatial.hdf5", 'r')

    ff = f_stats.get('ff_%s' % set_id)[...]
    ff = ff.reshape(ns, n_corr * C['el']**3)

    tmp = pca.transform(ff)

    f_red.create_dataset('reduced_%s' % set_id,
                         data=tmp,
                         dtype='float64')

    f_red.close()
    f_stats.close()

    timeE = np.round(time.time() - st, 2)
    msg = "transform to low dimensional space, %s: %s s" % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
import plot_correlation as pltcorr
import plot_pc_map as pltmap
import plot_response as pr
import plot_linkage_check as plc
import explained_variance as ev
import plot_pc_vs_poly as pltpcpoly
import numpy as np
from constants import const


C = const()

names_cal = C['names_cal']
set_id_cal = C['set_id_cal']
strt_cal = C['strt_cal']
ns_cal = C['ns_cal']
dir_cal = C['dir_cal']

names_val = C['names_val']
set_id_val = C['set_id_val']
strt_val = C['strt_val']
ns_val = C['ns_val']
dir_val = C['dir_val']

par = 'c4'

# """Plot an autocorrelation"""
# sn = 0
# iA = 1
# iB = 1
# pltcorr.pltcorr(ns_cal[0], set_id_cal[0], sn, iA, iB)

"""Plot the percentage explained variance"""
def evalf(theta, euler, var_id, thr, LL_p):
    """variable assignments
    var_id 0: sigma'11
    var_id 1: sigma'22
    var_id 2: sigma'33
    var_id 3: sigma'12
    var_id 4: sigma'13
    var_id 5: sigma'23
    var_id 6: total shear rate
    var_id 7: w12
    var_id 8: w13
    var_id 9: w23
    """

    filename = "log_eval.txt"

    C = constants.const()

    f = h5py.File('coef.hdf5', 'r')
    coef = f.get('coef')[:, var_id]
    f.close()

    basis_info = gsh.gsh_basis_info()

    # number of GSH bases to evaluate
    N_p_tmp = np.sum(basis_info[:, 0] <= LL_p)

    N_pts = theta.size

    """Select the desired set of coefficients"""
    msg = "cmax: %s" % C['cmax']
    fn.WP(msg, filename)

    cmat = np.unravel_index(np.arange(C['cmax']), C['N_tuple'])
    cmat = np.array(cmat).T

    cutoff = thr * np.abs(coef).max()
    print "cutoff: %s" % cutoff

    cutoffvec = (np.abs(coef) > cutoff) * \
                (np.arange(C['cmax']) < N_p_tmp * C['N_q'])
    print "cutoffvec.shape: %s" % str(cutoffvec.shape)

    indxvec = np.arange(C['cmax'])[cutoffvec]

    N_coef = indxvec.size
    pct_coef = 100. * N_coef / (N_p_tmp * C['N_q'])
    fn.WP("number of coefficients retained: %s" % N_coef, filename)
    fn.WP("percentage of coefficients retained %s%%"
          % np.round(pct_coef, 4), filename)

    """Evaluate the parts of the basis function individually"""
    st = time.time()

    p_U = np.unique(cmat[indxvec, 0])
    q_U = np.unique(cmat[indxvec, 1])

    all_basis_p = np.zeros([N_pts, C['N_p']], dtype='complex128')
    for p in p_U:
        all_basis_p[:, p] = np.squeeze(gsh.gsh_eval(euler, [p]))
    fn.WP("number of p basis functions used: %s" % p_U.size, filename)

    all_basis_q = np.zeros([N_pts, C['N_q']], dtype='complex128')
    for q in q_U:
        all_basis_q[:, q] = np.cos(q * np.pi * theta / C['L_th'])
    fn.WP("number of q basis functions used: %s" % q_U.size, filename)

    """Perform the prediction"""
    Y_ = np.zeros(theta.size, dtype='complex128')

    for ii in indxvec:
        p, q = cmat[ii, :]

        basis_p = all_basis_p[:, p]
        basis_q = all_basis_q[:, q]

        ep_set = basis_p * basis_q

        Y_ += coef[ii] * ep_set

        if np.mod(ii, 1000) == 0:
            msg = "evaluation complete for coefficient" + \
                  " %s out of %s" % (ii, N_coef)
            fn.WP(msg, filename)

    Ttime = np.round(time.time() - st, 3)
    msg = "total interpolation time: %ss" % Ttime
    fn.WP(msg, filename)
    msg = "interpolation time per point: %s" % (Ttime / theta.size)
    fn.WP(msg, filename)

    return Y_
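# A minimal sketch (toy coefficients) of the truncation rule in evalf():
# coefficients whose magnitude falls below thr times the largest magnitude
# are dropped before the series is evaluated.
import numpy as np

coef = np.array([5.0, 0.01, 1.2, 0.002])
thr = 0.01
keep = np.abs(coef) > thr * np.abs(coef).max()
print(np.arange(coef.size)[keep])  # [0 2]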
import numpy as np
import h5py
import sys
import constants

"""
in this version of the code the id of the tensor is an argument to the
script. trying to reduce the amount of data to analyse by half sampling
in the angular variable
"""

# initialize important variables
tnum = np.int64(sys.argv[1])
C = constants.const()

# these indices are defined for the sampled db inputs
sub2rad_eul = C['inc_eul'] * np.pi / 180.
sub2rad_th = C['inc_th'] * np.pi / 180.

# here we determine the sampling for en
a_std = 0.0050
b_std = 0.0085
en_inc = 0.0001  # en increment
et_norm = np.linspace(.0001, .0100, 100)

ai = np.int64(np.round(a_std / en_inc)) - 1  # index for start of en range
bi = np.int64(np.round(b_std / en_inc)) - 1  # index for end of en range
sample_indx = np.arange(ai, bi + 5, 3)
n_en = sample_indx.size
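# A quick check (values reproduced from the definitions above) of the en
# sampling: ai = 49, bi = 84, and np.arange(49, 89, 3) yields 14 samples.
ai_chk = np.int64(np.round(0.0050 / 0.0001)) - 1  # 49
bi_chk = np.int64(np.round(0.0085 / 0.0001)) - 1  # 84
print(np.arange(ai_chk, bi_chk + 5, 3).size)  # 14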