def transform(el, ns, H, set_id, step, wrt_file):

    st = time.time()

    n_corr = H**2

    f_red = h5py.File("sve_reduced.hdf5", 'a')
    f_stats = h5py.File("spatial_stats.hdf5", 'r')
    f_master = h5py.File("sve_reduced_all.hdf5", 'r')

    V = f_master.get('V')[...]
    corr_mean = f_master.get('corr_mean')[...]

    ff = f_stats.get('ff_%s' % set_id)[...]
    ff = ff.reshape(ns, n_corr * el**3)

    # subtract out the mean feature values
    ff_r = ff
    ff_r = ff_r - corr_mean

    # tmp = pca.transform(ff_r)

    # calculate the pc scores for ff_r
    tmp = np.dot(ff_r, V)

    f_red.create_dataset('reduced_%s' % set_id,
                         data=tmp,
                         dtype='complex128')

    # f_red.create_dataset('reduced_%s' % set_id_set[ii],
    #                      data=tmp,
    #                      dtype='float64')

    f_red.close()
    f_stats.close()
    f_master.close()

    timeE = np.round(time.time() - st, 2)
    msg = "transform to low dimensional space, %s: %s s" % (set_id, timeE)
    rr.WP(msg, wrt_file)

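# Illustrative sketch (not part of the original pipeline): the dot product
# above matches scikit-learn's PCA.transform for a non-whitened PCA,
# assuming V was stored as pca.components_.T and corr_mean as pca.mean_.
# How sve_reduced_all.hdf5 was actually built is not shown in this excerpt,
# so treat that correspondence as an assumption.
def pc_scores_sketch(ff_new, V, corr_mean):
    """project correlation vectors onto the stored PC basis (columns of V)"""
    # equivalent to pca.transform(ff_new) when V = pca.components_.T and
    # corr_mean = pca.mean_
    return np.dot(ff_new - corr_mean, V)
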
def euler_to_gsh(el, H, ns, set_id, step, wrt_file):

    start = time.time()

    f = h5py.File("data.hdf5", 'a')
    dset_name = 'euler_%s%s_s%s' % (ns, set_id, step)
    euler = f.get(dset_name)[...]
    euler = euler.swapaxes(1, 2)

    euler_GSH = np.zeros([ns, H, el**3], dtype='complex128')

    for sn in range(ns):
        tmp = gsh.gsh_eval(euler[sn, :, :], np.arange(H))
        euler_GSH[sn, :, :] = tmp.T

    dset_name = 'euler_GSH_%s%s_s%s.npy' % (ns, set_id, step)
    f.create_dataset(dset_name, data=euler_GSH)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "Conversion from Euler angles to GSH coefficients completed:"\
        " %s seconds" % timeE
    rr.WP(msg, wrt_file)

def validation_procedure(ns_cal, ns_val, set_id_cal, set_id_val,
                         step, comp, wrt_file):

    start = time.time()

    # el is the # of elements per side of the cube
    el = 21
    # H is the number of GSH coefficients
    H = 15

    M = np.load('M_%s%s_s%s.npy' % (ns_val, set_id_val, step))
    specinfc = np.load('specinfc%s_%s%s_s%s.npy'
                       % (comp, ns_cal, set_id_cal, step))
    specinfc = np.reshape(specinfc, [el, el, el, H])

    mks_R = np.zeros([el, el, el, ns_val])

    for sn in xrange(ns_val):
        mks_F = np.sum(np.conjugate(specinfc) * M[:, :, :, sn, :], 3)
        mks_R[:, :, :, sn] = np.fft.ifftn(mks_F).real

    np.save('mksR%s_%s%s_s%s' % (comp, ns_val, set_id_val, step), mks_R)

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'validation performed for component %s: %s seconds' % (comp, timeE)
    rr.WP(msg, wrt_file)

def inclusion(el, ns, set_id, step, wrt_file, vfrac):

    start = time.time()

    n_phase = len(vfrac)

    f = h5py.File("ref_%s%s_s%s.hdf5" % (ns, set_id, step), 'a')
    euler = f.create_dataset("euler", (ns, 3, el**3), dtype='float64')

    for sn in xrange(ns):

        euler[sn, 0, :] = reul(el**3)
        euler[sn, 1, :] = reul(el**3)
        euler[sn, 2, :] = reul(el**3)

        tmp = np.random.rand(el**3)

        for ii in xrange(n_phase):

            indx = (tmp > np.sum(vfrac[:ii])) * \
                   (tmp < np.sum(vfrac[:(ii + 1)]))

            euler[sn, 0, indx] = reul(np.sum(indx))
            euler[sn, 1, indx] = reul(np.sum(indx))
            euler[sn, 2, indx] = reul(np.sum(indx))

            del indx

    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "%s SVEs with inclusions generated with %s orientations: %ss" \
        % (ns, n_phase, timeE)
    rr.WP(msg, wrt_file)

def validation(el, H, ns_cal, ns_val, set_id_cal, set_id_val, step, comp,
               typ, wrt_file):

    start = time.time()

    f = h5py.File("coef.hdf5", 'r')
    dset_name = 'coef%s_%s%s_s%s' % (comp, ns_cal, set_id_cal, step)
    coef = f.get(dset_name)[...].reshape(H, el, el, el)
    f.close()

    f = h5py.File("data.hdf5", 'a')
    dset_name = 'M_%s%s_s%s' % (ns_val, set_id_val, step)
    M = f.get(dset_name)[...]

    tmp = np.sum(np.conjugate(coef) * M, 1)
    mks_R = np.fft.ifftn(tmp, [el, el, el], [1, 2, 3]).real

    dset_name = '%s%s_mks_%s%s_s%s' % (typ, comp, ns_val, set_id_val, step)
    f.create_dataset(dset_name, data=mks_R)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'validation performed for component %s: %s seconds' % (comp, timeE)
    rr.WP(msg, wrt_file)

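# Illustrative sketch restating the MKS prediction step used above, with the
# array shapes as they appear in this file (coef: (H, el, el, el) influence
# coefficients, M: (ns, H, el, el, el) microstructure-function FFTs). The
# helper name is hypothetical; it is only meant to make the broadcasting and
# inverse FFT explicit.
def mks_predict_sketch(coef, M, el):
    """return the (ns, el, el, el) predicted response fields"""
    # sum conj(coef) * M over the H local states at every spatial frequency,
    # then inverse-transform over the three spatial axes
    tmp = np.sum(np.conjugate(coef) * M, 1)
    return np.fft.ifftn(tmp, [el, el, el], [1, 2, 3]).real
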
def read_euler(el, ns, set_id, step, newdir, wrt_file, funit):

    start = time.time()

    euler = np.zeros([ns, 3, el**3])

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % step):
            euler[sn, :, :] = rr.read_vtk_vector(filename=filename)
            sn += 1

    if funit == 1:
        euler = euler * (np.pi / 180.)

    # return to the original directory
    os.chdir('..')

    np.save('euler_%s%s_s%s' % (ns, set_id, step), euler)

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'euler angles read from .vtk file for %s: %s seconds' \
        % (set_id, timeE)
    rr.WP(msg, wrt_file)

def calibration_procedure(ns, set_id, step, comp, wrt_file):

    # el is the # of elements per side of the cube
    el = 21
    # specify the number of local states you are using
    H = 15

    M = np.load('M_%s%s_s%s.npy' % (ns, set_id, step))
    r_fft = np.load('r%s_fft_%s%s_s%s.npy' % (comp, ns, set_id, step))

    start = time.time()

    specinfc = np.zeros((el**3, H), dtype='complex64')

    # here we perform the calibration for the scalar FIP
    specinfc[0, :] = rr.calib(0, M, r_fft, 0, H, el, ns)
    [specinfc[1, :], p] = rr.calib(1, M, r_fft, 0, H, el, ns)

    # calib_red is simply calib with some default arguments
    calib_red = partial(rr.calib, M=M, r_fft=r_fft, p=p, H=H, el=el, ns=ns)

    specinfc[2:(el**3), :] = np.asarray(map(calib_red, range(2, el**3)))

    np.save('specinfc%s_%s%s_s%s' % (comp, ns, set_id, step), specinfc)

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'Calibration, component %s: %s seconds' % (comp, timeE)
    rr.WP(msg, wrt_file)

def read_euler(strt, ns, set_id, newdir, funit):

    start = time.time()

    C = const()

    euler = np.zeros([ns, 3, C['el']**3])

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    for ii in xrange(ns):
        sn = strt + ii + 1
        filename = "Ti64_Dream3D_v01_Output_%s.vtk" % sn
        euler[ii, :, :] = rr.read_vtk_vector(filename=filename)

    if funit == 1:
        euler = euler * (np.pi / 180.)

    # return to the original directory
    os.chdir('..')

    f = h5py.File("spatial.hdf5", 'a')
    dset_name = 'euler_%s' % set_id
    f.create_dataset(dset_name, data=euler)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'euler angles read from .vtk file for %s: %s seconds' \
        % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])

def read_fip(ns, set_id, newdir):

    start = time.time()

    C = const()

    fip = np.zeros([ns, C['el']**3])

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % C['step']):
            fip[sn, :] = rr.read_vtk_scalar(filename=filename)
            sn += 1

    """return to the original directory"""
    os.chdir('..')

    f = h5py.File("responses.hdf5", 'a')
    f.create_dataset('fip_%s' % set_id, data=(1e9) * fip)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'fip values read from .vtk file for %s: %s seconds' % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])

def read_fip(el, ns, set_id, step, newdir, wrt_file):

    start = time.time()

    fip = np.zeros([ns, el**3])

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % step):
            fip[sn, :] = rr.read_vtk_scalar(filename=filename)
            sn += 1

    """return to the original directory"""
    os.chdir('..')

    f = h5py.File("fip_%s%s_s%s.hdf5" % (ns, set_id, step), 'a')
    f.create_dataset('fip', data=fip)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'fip values read from .vtk file for %s: %s seconds' % (set_id, timeE)
    rr.WP(msg, wrt_file)

def bicrystal(el, ns, set_id, step, wrt_file):

    start = time.time()

    sshape = (ns, el, el, el)

    f = h5py.File("ref_%s%s_s%s.hdf5" % (ns, set_id, step), 'a')
    sves = f.create_dataset("sves", sshape, dtype='int8')
    M = f.create_dataset("M", sshape, dtype='complex64')

    sves[...] = np.zeros(sshape)

    for sn in xrange(ns):

        # define a random direction
        direc = np.int8(3 * np.random.rand())
        # define a random volume fraction
        vf = np.int8(20 * np.random.rand()) + 1

        if direc == 0:
            sves[sn, :vf, :, :] = np.ones((vf, el, el))
        elif direc == 1:
            sves[sn, :, :vf, :] = np.ones((el, vf, el))
        elif direc == 2:
            sves[sn, :, :, :vf] = np.ones((el, el, vf))

    M[...] = np.fft.fftn(sves[...], axes=[1, 2, 3])

    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "%s bicrystal SVEs generated: %ss" % (ns, timeE)
    rr.WP(msg, wrt_file)

def regress(ns, set_id):

    st = time.time()

    C = const()

    """load the feature data"""
    f = h5py.File("pre_regress_%s.hdf5" % set_id, 'r')
    X = f.get('X')[...]
    f.close()

    """load the dependent variable data"""
    f = h5py.File("responses.hdf5", 'r')
    y = f.get('y_sim_%s' % set_id)[...]
    y = y.reshape((ns * C['el']**3))
    f.close()

    # clf = svm.SVR()
    # clf = neighbors.KNeighborsRegressor(n_neighbors=1, weights='uniform')
    # clf = tree.DecisionTreeRegressor(max_depth=10)
    clf = linear_model.LinearRegression(n_jobs=1)

    clf.fit(X, y)

    joblib.dump(clf, 'modelfit.pkl')

    timeE = np.round(time.time() - st, 1)
    msg = "fit completed: %s s" % timeE
    rr.WP(msg, C['wrt_file'])

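# Illustrative sketch (hypothetical helper, not in the original pipeline):
# once regress() has run, the pickled estimator can be reloaded with
# joblib.load and applied to a feature matrix written by features() for
# another set_id. File names follow the patterns used elsewhere in this
# file; imports (h5py, joblib) are assumed to match the module-level imports
# already in use.
def predict_sketch(set_id):
    """load the fitted model and predict responses for a stored X matrix"""
    f = h5py.File("pre_regress_%s.hdf5" % set_id, 'r')
    X = f.get('X')[...]
    f.close()

    clf = joblib.load('modelfit.pkl')
    return clf.predict(X)
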
def calibration_procedure(el, H, ns, set_id, step, comp, typ, wrt_file):

    f = h5py.File("data.hdf5", 'r')
    dset_name = 'M_%s%s_s%s' % (ns, set_id, step)
    M = f.get(dset_name)[...]
    dset_name = '%s%s_fft_fem_%s%s_s%s' % (typ, comp, ns, set_id, step)
    r_fft = f.get(dset_name)[...]
    f.close()

    start = time.time()

    coef = np.zeros((H, el**3), dtype='complex64')

    # here we perform the calibration for the scalar FIP
    coef[:, 0] = rr.calib(0, M, r_fft, 0, H, el, ns)
    [coef[:, 1], p] = rr.calib(1, M, r_fft, 0, H, el, ns)

    # calib_red is simply calib with some default arguments
    calib_red = partial(rr.calib, M=M, r_fft=r_fft, p=p, H=H, el=el, ns=ns)

    coef[:, 2:(el**3)] = np.asarray(map(calib_red,
                                        range(2, el**3))).swapaxes(0, 1)

    f = h5py.File("coef.hdf5", 'a')
    dset_name = 'coef%s_%s%s_s%s' % (comp, ns, set_id, step)
    f.create_dataset(dset_name, data=coef)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'Calibration, component %s: %s seconds' % (comp, timeE)
    rr.WP(msg, wrt_file)

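# Illustrative sketch (assumption about rr.calib, whose implementation is
# not shown in this excerpt): MKS calibration is conventionally a
# least-squares fit performed independently at each spatial frequency,
# solving M[:, :, k] @ coef[:, k] ~= r_fft[:, k] over the calibration
# ensemble. The helper below is a conceptual stand-in on flattened arrays,
# not the repository's actual rr.calib.
def calib_lstsq_sketch(M, r_fft):
    """M: (ns, H, S) microstructure FFTs, r_fft: (ns, S) response FFTs;
    returns coef: (H, S) influence coefficients in frequency space"""
    ns, H, S = M.shape
    coef = np.zeros((H, S), dtype='complex128')
    for k in range(S):
        # per-frequency least squares over the ns calibration samples
        coef[:, k] = np.linalg.lstsq(M[:, :, k], r_fft[:, k], rcond=None)[0]
    return coef
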
def read_euler(el, ns, set_id, step, newdir, wrt_file, funit):

    start = time.time()

    euler = np.zeros([ns, 3, el**3])

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % step):
            euler[sn, :, :] = rr.read_vtk_vector(filename=filename)
            sn += 1

    if funit == 1:
        euler = euler * (np.pi / 180.)

    """return to the original directory"""
    os.chdir('..')

    f = h5py.File("spatial_stats.hdf5", 'a')
    f.create_dataset('euler_%s' % set_id, data=euler)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'euler angles read from .vtk file for %s: %s seconds' \
        % (set_id, timeE)
    rr.WP(msg, wrt_file)

def calibration_procedure(el, H, ns, set_id, step, comp, typ, wrt_file):

    M = np.load('M_%s%s_s%s.npy' % (ns, set_id, step))
    r_fft = np.load('%s%s_fft_fem_%s%s_s%s.npy' % (typ, comp, ns,
                                                   set_id, step))

    start = time.time()

    specinfc = np.zeros((H, el**3), dtype='complex64')

    # here we perform the calibration for the scalar FIP
    specinfc[:, 0] = rr.calib(0, M, r_fft, 0, H, el, ns)
    [specinfc[:, 1], p] = rr.calib(1, M, r_fft, 0, H, el, ns)

    # calib_red is simply calib with some default arguments
    calib_red = partial(rr.calib, M=M, r_fft=r_fft, p=p, H=H, el=el, ns=ns)

    specinfc[:, 2:(el**3)] = np.asarray(map(calib_red,
                                            range(2, el**3))).swapaxes(0, 1)

    np.save('specinfc%s_%s%s_s%s' % (comp, ns, set_id, step), specinfc)

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'Calibration, component %s: %s seconds' % (comp, timeE)
    rr.WP(msg, wrt_file)

def euler_to_gsh(el, H, ns, set_id, step, wrt_file):

    start = time.time()

    # open HDF5 file
    base = tb.open_file("ref_%s%s_s%s.h5" % (ns, set_id, step), mode="r")
    euler = base.root.msf.euler[...]
    # close the HDF5 file
    base.close()

    euler_GSH = np.zeros([ns, H, el**3], dtype='complex128')

    for sn in xrange(ns):
        euler_GSH[sn, :, :] = gsh.gsh(euler[sn, :, :])

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "Conversion from Euler angles to GSH coefficients completed:" + \
        " %s seconds" % timeE
    rr.WP(msg, wrt_file)

    euler_GSH = euler_GSH.reshape([ns, H, el, el, el])

    # MICROSTRUCTURE FUNCTIONS IN FREQUENCY SPACE

    start = time.time()

    M = np.fft.fftn(euler_GSH, axes=[2, 3, 4])
    del euler_GSH
    size = M.nbytes

    # open HDF5 file
    base = tb.open_file("D_%s%s_s%s.h5" % (ns, set_id, step), mode="a")
    # initialize array for the FFT of the microstructure function
    base.create_array('/msf', 'M', M, 'FFT of GSH microstructure function')
    # close the HDF5 file
    base.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "FFT3 conversion of micr to M_%s%s_s%s: %s seconds" % \
        (ns, set_id, step, timeE)
    rr.WP(msg, wrt_file)
    msg = 'Size of M_%s%s_s%s: %s bytes' % (ns, set_id, step, size)
    rr.WP(msg, wrt_file)

def get_M(ns, set_id):

    start = time.time()

    C = const()

    """get the euler angle files"""
    f = h5py.File("spatial.hdf5", 'a')
    euler = f.get('euler_%s' % set_id)[...]

    mf = np.zeros([ns, C['H'], C['el']**3], dtype='float64')

    c = 0
    for h in xrange(C['H']):
        tmp = gsh.gsh_eval(euler.swapaxes(1, 2), [h])
        tmp = np.squeeze(tmp)
        mf[:, c, :] = tmp.real
        c += 1
        # mf[:, h, :] = (2*indxvec[h, 0]+1)*tmp
        # 2*l+1 included in maple generator

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "Conversion from Euler angles to GSH coefficients completed:" + \
        " %s seconds" % timeE
    rr.WP(msg, C['wrt_file'])

    mf = mf.reshape([ns, C['H'], C['el'], C['el'], C['el']])

    # MICROSTRUCTURE FUNCTIONS IN FREQUENCY SPACE

    start = time.time()

    M = np.fft.fftn(mf, axes=[2, 3, 4])
    del mf

    f.create_dataset('M_%s' % set_id, data=M)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "FFT3 conversion of mf to M for %s: %s seconds" % \
        (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
    msg = 'Size of M: %s gb' % str(M.nbytes/(1e9))
    rr.WP(msg, C['wrt_file'])

def msf(el, ns, H, set_id, wrt_file):

    start = time.time()

    # import microstructures
    tmp = sio.loadmat('micr_H%s_%s.mat' % (H, set_id))['gshS']

    # #### 12/18/2014 CHANGE BACK
    # tmp = np.swapaxes(tmp,0,2)
    # tmp = tmp/np.array([[[1,5,5,5,5,5,9,9,9,9,9,9,9,9,9]]]).T
    #     # remove normalization
    # tmp = np.swapaxes(tmp,0,1)
    # ####

    tmp = np.swapaxes(np.swapaxes(tmp, 0, 2), 0, 1)

    micr = tmp.reshape([ns, H, el, el, el])
    del tmp

    np.save('msf_%s%s' % (ns, set_id), micr)

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "generate real-space microstructure function from" \
        " GSH-coefficients: %s seconds" % timeE
    rr.WP(msg, wrt_file)

    # Microstructure functions in frequency space

    start = time.time()

    M = np.fft.fftn(micr, axes=[2, 3, 4])
    del micr
    size = M.nbytes

    np.save('M_%s%s' % (ns, set_id), M)

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "FFT3 conversion of micr to M_%s%s: %s seconds" % (ns, set_id, timeE)
    rr.WP(msg, wrt_file)
    msg = 'Size of M_%s%s: %s bytes' % (ns, set_id, size)
    rr.WP(msg, wrt_file)

def euler_to_gsh(el, H, ns, set_id, step, wrt_file):

    start = time.time()

    f = h5py.File("ref_%s%s_s%s.hdf5" % (ns, set_id, step), 'r')
    euler = f.get('euler')[...]
    f.close()

    euler_GSH = np.zeros([ns, H, el**3], dtype='complex128')

    for sn in xrange(ns):
        tmp = gsh.gsh_eval(euler[sn, ...].swapaxes(0, 1), np.arange(H))
        euler_GSH[sn, :, :] = tmp.swapaxes(0, 1)

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "Conversion from Euler angles to GSH coefficients completed:" + \
        " %s seconds" % timeE
    rr.WP(msg, wrt_file)

    euler_GSH = euler_GSH.reshape([ns, H, el, el, el])

    # MICROSTRUCTURE FUNCTIONS IN FREQUENCY SPACE

    start = time.time()

    M = np.fft.fftn(euler_GSH, axes=[2, 3, 4])
    del euler_GSH
    size = M.nbytes

    f = h5py.File("D_%s%s_s%s.hdf5" % (ns, set_id, step), 'a')
    f.create_dataset('M', data=M)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "FFT3 conversion of micr to M_%s%s_s%s: %s seconds" % \
        (ns, set_id, step, timeE)
    rr.WP(msg, wrt_file)
    msg = 'Size of M_%s%s_s%s: %s bytes' % (ns, set_id, step, size)
    rr.WP(msg, wrt_file)

def read_meas(el, ns, set_id, step, comp, tensor_id, newdir, wrt_file):

    start = time.time()

    typ = ['sigma', 'epsilon_t', 'epsilon_p']

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    compd = {'11': 0, '22': 4, '33': 8, '12': 1, '13': 6, '23': 5}
    compp = compd[comp]

    r_fem = np.zeros([ns, el, el, el])

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % step):
            r_temp = rr.read_vtk_tensor(filename=filename,
                                        tensor_id=tensor_id,
                                        comp=compp)
            r_fem[sn, ...] = r_temp.reshape([el, el, el])
            sn += 1

    # return to the original directory
    os.chdir('..')

    # open HDF5 file
    base = tb.open_file("ref_%s%s_s%s.h5" % (ns, set_id, step), mode="a")
    # create a group one level below root called r[comp]
    group = base.create_group('/%s' % typ[tensor_id],
                              'r%s' % comp,
                              'comp %s response fields' % comp)
    # initialize array for the FEM response fields
    base.create_array(group,
                      'r_fem',
                      r_fem,
                      'FEM generated response fields')
    # close the HDF5 file
    base.close()

    # FFT OF RESPONSE FIELD

    # open HDF5 file
    base = tb.open_file("D_%s%s_s%s.h5" % (ns, set_id, step), mode="a")
    # create a group one level below root called r[comp]
    group = base.create_group('/%s' % typ[tensor_id],
                              'r%s' % comp,
                              'FFTs of comp %s response fields' % comp)
    # initialize array for the FFTs of the response fields
    base.create_array(group,
                      'r_fft',
                      np.fft.fftn(r_fem, axes=[1, 2, 3]),
                      'FFT of FEM generated response fields')
    # close the HDF5 file
    base.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'The measure of interest has been read from .vtk file' \
        ' for %s, set %s: %s seconds' % (set_id, comp, timeE)
    rr.WP(msg, wrt_file)

def read_meas(el, ns, set_id, step, comp, tensor_id, newdir, wrt_file):

    start = time.time()

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    compd = {'11': 0, '22': 4, '33': 8, '23': 5, '12': 1, '13': 6}
    compp = compd[comp]

    r_real = np.zeros([ns, el, el, el])

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % step):
            r_temp = rr.read_vtk_tensor(filename=filename,
                                        tensor_id=tensor_id,
                                        comp=compp)
            r_real[sn, ...] = r_temp.reshape([el, el, el])
            sn += 1

    # for filename in os.listdir(nwd):
    #     if filename.endswith('.vtk'):
    #         r_temp = rr.read_vtk_tensor(filename=filename,
    #                                     tensor_id=tensor_id,
    #                                     comp=compp)
    #         r_real[sn, ...] = r_temp.reshape([el, el, el])
    #         sn += 1

    # return to the original directory
    os.chdir('..')

    typ = ['sigma', 'epsilon', 'epsilonp']

    f = h5py.File("data.hdf5", 'a')
    dset_name = '%s%s_fem_%s%s_s%s' % (typ[tensor_id], comp, ns, set_id, step)
    f.create_dataset(dset_name, data=r_real)

    # fftn of response fields
    r_fft = np.fft.fftn(r_real, axes=[1, 2, 3])
    del r_real

    dset_name = '%s%s_fft_fem_%s%s_s%s' % (typ[tensor_id], comp, ns,
                                           set_id, step)
    f.create_dataset(dset_name, data=r_fft)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'The measure of interest has been read from .vtk file' \
        ' for %s, component %s: %s seconds' % (set_id, comp, timeE)
    rr.WP(msg, wrt_file)

def validation(el, H, ns_cal, ns_val, set_id_cal, set_id_val, step,
               comp, wrt_file):

    start = time.time()

    # open HDF5 file
    base = tb.open_file("infl_%s%s_s%s.h5" % (ns_cal, set_id_cal, step),
                        mode="r")
    infl = base.get_node('/', 'infl%s' % comp)
    infl_coef = infl.infl_coef[...].reshape(H, el, el, el)
    # close HDF5 file
    base.close()

    # open HDF5 file
    base = tb.open_file("D_%s%s_s%s.h5" % (ns_val, set_id_val, step),
                        mode="r")
    M = base.root.msf.M[...]
    # close HDF5 file
    base.close()

    # perform the validation calculations
    tmp = np.sum(np.conjugate(infl_coef) * M, 1)
    r_mks = np.fft.ifftn(tmp, [el, el, el], [1, 2, 3]).real

    # open HDF5 file
    base = tb.open_file("ref_%s%s_s%s.h5" % (ns_val, set_id_val, step),
                        mode="a")

    # save the MKS predicted total strain fields
    group = base.get_node('/epsilon_t', 'r%s' % comp)
    base.create_array(group, 'r_mks', r_mks)

    # find the node containing the fem total strain fields
    r_et = base.get_node('/epsilon_t', 'r%s' % comp)
    et_fem = r_et.r_fem[...]

    # find the node containing the fem plastic strain fields
    r_ep = base.get_node('/epsilon_p', 'r%s' % comp)
    ep_fem = r_ep.r_fem[...]

    # estimate the plastic strain predicted by fem as the mks predicted total
    # strain minus the FEM elastic strain field
    ep_mks = r_mks - (et_fem - ep_fem)
    del et_fem, ep_fem

    # save the estimated plastic strain from MKS to an array
    base.create_array(r_ep, 'r_mks', ep_mks)

    # close HDF5 file
    base.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'validation performed for component %s: %s seconds' % (comp, timeE)
    rr.WP(msg, wrt_file)

def read_meas(ns, set_id, step, comp, vtk_filename, tensor_id, newdir,
              wrt_file):

    start = time.time()

    # el is the # of elements per side of the cube
    el = 21

    r_real = np.zeros([el, el, el, ns])

    # change to directory with the .vtk files
    # cwd = os.getcwd()
    # os.chdir(cwd + '\\' + newdir)
    # os.chdir(cwd + '/' + newdir)  # for unix
    #
    # compd = {'11':0,'22':4,'33':8,'12':1,'23':5,'31':6}
    # compp = compd[comp]
    # print compp
    #
    # for sn in xrange(ns):
    #     l_sn = str(sn+1).zfill(5)
    #     r_temp = rr.read_vtk_tensor(filename=vtk_filename % l_sn,
    #                                 tensor_id=tensor_id, comp=compp)
    #     r_real[:,:,:,sn] = np.swapaxes(np.reshape(np.flipud(r_temp),
    #                                    [el,el,el]),1,2)

    nwd = os.getcwd() + '\\' + newdir
    # nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    compd = {'11': 0, '22': 4, '33': 8, '12': 1, '23': 5, '31': 6}
    compp = compd[comp]

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % step):
            r_temp = rr.read_vtk_tensor(filename=filename,
                                        tensor_id=tensor_id,
                                        comp=compp)
            r_real[:, :, :, sn] = np.swapaxes(np.reshape(np.flipud(r_temp),
                                              [el, el, el]), 1, 2)
            sn += 1

    # return to the original directory
    os.chdir('..')

    np.save('r%s_%s%s_s%s' % (comp, ns, set_id, step), r_real)

    # fftn of response fields
    r_fft = np.fft.fftn(r_real, axes=[0, 1, 2])
    del r_real

    np.save('r%s_fft_%s%s_s%s' % (comp, ns, set_id, step), r_fft)

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'The measure of interest has been read from .vtk file' \
        ' for %s, set %s: %s seconds' % (set_id, comp, timeE)
    rr.WP(msg, wrt_file)

def micr_func(ns, set_id, step, wrt_file):

    start = time.time()

    el = 21
    # specify the number of local states you are using
    H = 15

    # import microstructures
    micr = np.zeros([el, el, el, ns, H], dtype='complex128')
    pre_micr = np.load('euler_GSH_%s%s_s%s.npy' % (ns, set_id, step))

    for h in xrange(H):
        for sn in range(ns):
            micr[:, :, :, sn, h] = np.swapaxes(
                np.reshape(np.flipud(pre_micr[:, sn, h]), [el, el, el]), 1, 2)

    del pre_micr

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "generate real-space microstructure function from" \
        " GSH-coefficients: %s seconds" % timeE
    rr.WP(msg, wrt_file)

    # Microstructure functions in frequency space

    start = time.time()

    M = np.fft.fftn(micr, axes=[0, 1, 2])
    del micr
    size = M.nbytes

    np.save('M_%s%s_s%s' % (ns, set_id, step), M)

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "FFT3 conversion of micr to M_%s%s_s%s: %s seconds" \
        % (ns, set_id, step, timeE)
    rr.WP(msg, wrt_file)
    msg = 'Size of M_%s%s_s%s: %s bytes' % (ns, set_id, step, size)
    rr.WP(msg, wrt_file)

def micr_func(el, H, ns, set_id, step, wrt_file):

    start = time.time()

    f = h5py.File("data.hdf5", 'a')
    dset_name = 'euler_GSH_%s%s_s%s.npy' % (ns, set_id, step)
    tmp = f.get(dset_name)[...]

    # tmp = np.swapaxes(np.swapaxes(tmp,0,2),0,1)

    micr = tmp.reshape([ns, H, el, el, el])
    del tmp

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "generate real-space microstructure function from GSH-coefficients:"\
        " %s seconds" % timeE
    rr.WP(msg, wrt_file)

    # Microstructure functions in frequency space

    start = time.time()

    M = np.fft.fftn(micr, axes=[2, 3, 4])
    del micr
    size = M.nbytes

    dset_name = 'M_%s%s_s%s' % (ns, set_id, step)
    f.create_dataset(dset_name, data=M)
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "FFT3 conversion of micr to M_%s%s_s%s: %s seconds" \
        % (ns, set_id, step, timeE)
    rr.WP(msg, wrt_file)
    msg = 'Size of M_%s%s_s%s: %s bytes' % (ns, set_id, step, size)
    rr.WP(msg, wrt_file)

def rand(el, ns, set_id, step, wrt_file):

    start = time.time()

    f = h5py.File("ref_%s%s_s%s.hdf5" % (ns, set_id, step), 'a')
    euler = f.create_dataset("euler", (ns, 3, el**3), dtype='float64')
    euler[...] = np.random.rand(ns, 3, el**3) * 360
    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "%s SVEs generated with random orientations: %ss" % (ns, timeE)
    rr.WP(msg, wrt_file)

def read_meas(ns, set_id, comp, tensor_id, newdir):

    start = time.time()

    C = const()

    typ = ['sigma', 'epsilon_t', 'epsilon_p']

    # nwd = os.getcwd() + '\\' + newdir
    nwd = os.getcwd() + '/' + newdir  # for unix
    os.chdir(nwd)

    compd = {'11': 0, '22': 4, '33': 8, '12': 1, '13': 6, '23': 5}
    compp = compd[comp]

    r_fem = np.zeros([ns, C['el'], C['el'], C['el']])

    sn = 0
    for filename in os.listdir(nwd):
        if filename.endswith('%s.vtk' % C['step']):
            r_temp = rr.read_vtk_tensor(filename=filename,
                                        tensor_id=tensor_id,
                                        comp=compp)
            r_fem[sn, ...] = r_temp.reshape([C['el'], C['el'], C['el']])
            sn += 1

    """return to the original directory"""
    os.chdir('..')

    f = h5py.File("responses.hdf5", 'a')
    f.create_dataset('%s_%s' % (typ[tensor_id], set_id), data=r_fem)
    f.close()

    # """FFT OF RESPONSE FIELD"""
    # f = h5py.File("D_%s%s_s%s.hdf5" % (ns, set_id, step), 'a')
    # tmp = np.fft.fftn(r_fem, axes=[1, 2, 3])
    # print tmp.shape
    # f.create_dataset('rfft%s_%s' % (comp, typ[tensor_id]), data=tmp)
    # f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = 'The measure of interest has been read from .vtk file' \
        ' for %s, component %s, type %s: %s seconds' \
        % (set_id, comp, typ[tensor_id], timeE)
    rr.WP(msg, C['wrt_file'])

def improcess(el, ns, H, set_id, wrt_file):

    start = time.time()

    sshape = (ns, el, el)

    f = h5py.File("spatial_stats.hdf5", 'a')
    sves = f.create_dataset("sves_%s" % set_id, sshape, dtype='int64')

    sigset = [0., .2, .4, .6, .8, 2, 5]

    for sn in xrange(ns):

        base = np.random.random((el, el))

        r2a = np.random.randint(1, 5)
        r2b = np.random.randint(1, 5)
        weights = np.random.random(size=(r2a, r2b))

        raw = convolve(base, weights, mode='wrap')
        blur = gaussian_filter(raw, sigma=np.random.choice(sigset))

        scaled = scale_array(blur)
        scaled_lin = scaled.reshape(el**2)

        sve = np.zeros((el**2))

        # draw random volume-fraction bounds that partition [0, 1] into H bins
        vf_bounds = np.zeros(H + 1)
        vf_bounds[-1] = 1
        tmp = np.sort(np.random.rand(H - 1))
        vf_bounds[1:H] = tmp

        # threshold the scaled field into H local states
        for ii in xrange(H):
            indx = (scaled_lin > vf_bounds[ii]) * \
                   (scaled_lin <= vf_bounds[ii + 1])
            sve[indx] = ii

        sves[sn, ...] = sve.reshape(el, el)

    tmp = sves[...]

    f.close()

    end = time.time()
    timeE = np.round((end - start), 3)

    msg = "%s SVEs generated: %ss" % (ns, timeE)
    rr.WP(msg, wrt_file)

    return tmp

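# scale_array() is not defined in this excerpt; the thresholding loop above
# only requires the blurred field to be rescaled to the interval [0, 1]. A
# hypothetical min-max normalization with that behavior:
def scale_array_sketch(x):
    """linearly rescale an array so its values span [0, 1]"""
    return (x - x.min()) / (x.max() - x.min())
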
def calibration_procedure(el, H, ns, set_id, step, comp, typ, wrt_file):

    st = time.time()

    # open HDF5 file
    base = tb.open_file("gsh_try_%s%s_s%s.h5" % (ns, set_id, step), mode="r")
    M = base.root.msf.M[...]
    # close the HDF5 file
    base.close()

    # open HDF5 file
    base = tb.open_file("ref_%s%s_s%s.h5" % (ns, set_id, step), mode="r")
    # retrieve data from HDF5 file
    resp = base.get_node('/%s' % typ, 'r%s' % comp)
    r_fft = resp.r_fft[...]
    # close the HDF5 file
    base.close()

    specinfc = np.zeros((H, el**3), dtype='complex64')

    # here we perform the calibration for the scalar FIP
    specinfc[:, 0] = rr.calib(0, M, r_fft, 0, H, el, ns)
    [specinfc[:, 1], p] = rr.calib(1, M, r_fft, 0, H, el, ns)

    # calib_red is simply calib with some default arguments
    calib_red = partial(rr.calib, M=M, r_fft=r_fft, p=p, H=H, el=el, ns=ns)

    specinfc[:, 2:(el**3)] = np.asarray(map(calib_red,
                                            range(2, el**3))).swapaxes(0, 1)

    # open HDF5 file
    base = tb.open_file("infl_%s%s_s%s.h5" % (ns, set_id, step), mode="w")
    # create a group one level below root called infl[comp]
    group = base.create_group('/',
                              'infl%s' % comp,
                              'influence function for component %s' % comp)
    base.create_array(group,
                      'infl_coef',
                      specinfc,
                      'array of influence coefficients')
    # close the HDF5 file
    base.close()

    msg = 'Calibration, component %s: %s seconds' % \
        (comp, np.round((time.time() - st), 3))
    rr.WP(msg, wrt_file)

def features(ns, set_id):

    st = time.time()

    C = const()

    """gather the independent variable data"""
    f = h5py.File("spatial.hdf5", 'r')
    neig = f.get('neig_%s' % set_id)[...]
    neig = neig.reshape((C['n_samp'], C['H'], C['cmax']))
    f.close()

    """calculate the X matrix"""
    X = np.zeros((C['n_samp'], C['xmax']), dtype='float64')

    c = 0  # keep track of position in X

    """for 0th order polynomial"""
    X[:, 0] = 1
    c += 1

    """for 1st order polynomial"""
    Imax = C['H']*C['cmax']
    Imat = np.unravel_index(np.arange(Imax), (C['H'], C['cmax']))
    Imat = np.array(Imat).T

    for I in xrange(Imax):
        h, pos = Imat[I, :]
        X[:, c] = neig[:, h, pos]
        c += 1

    """for 2nd order polynomial"""
    Imax = C['H']*C['cmax']**2
    Imat = np.unravel_index(np.arange(Imax), (C['H'], C['cmax'], C['cmax']))
    Imat = np.array(Imat).T

    for I in xrange(Imax):
        h, pos1, pos2 = Imat[I, :]
        X[:, c] = neig[:, h, pos1]*neig[:, h, pos2]
        c += 1

    f = h5py.File("pre_regress_%s.hdf5" % set_id, 'w')
    f.create_dataset('X', data=X)
    f.close()

    timeE = np.round(time.time()-st, 1)
    msg = "features extracted for %s: %s s" % (set_id, timeE)
    rr.WP(msg, C['wrt_file'])
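
# Note on sizing, derived from the loops above: the column count of X must
# satisfy C['xmax'] = 1 + C['H']*C['cmax'] + C['H']*C['cmax']**2, i.e. one
# constant column, one column per (h, pos) pair, and one per (h, pos1, pos2)
# triple. A hypothetical check, assuming const() exposes the same keys used
# in features():
def xmax_check_sketch(C):
    """return the expected number of feature columns for features()"""
    return 1 + C['H']*C['cmax'] + C['H']*C['cmax']**2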