def __init__(self, which_case, LUT, RandomSamples, interp_type):
    print('SciPy Interpolating ', which_case)
    select = {"rhoe": ('Density', 'StaticEnergy'),
              "PT": ('Pressure', 'Temperature'),
              "Prho": ('Pressure', 'Density'),
              "rhoT": ('Density', 'Temperature'),
              "Ps": ('Pressure', 'Entropy'),
              "hs": ('Enthalpy', 'Entropy')}
    thermo1, thermo2 = select[which_case]
    x = getattr(LUT, thermo1)
    y = getattr(LUT, thermo2)
    samples_x = getattr(RandomSamples, thermo1)
    samples_y = getattr(RandomSamples, thermo2)
    setattr(self, thermo1, samples_x)
    setattr(self, thermo2, samples_y)
    variables = sp.array(['Temperature', 'Density', 'Enthalpy', 'StaticEnergy',
                          'Entropy', 'Pressure', 'SoundSpeed2', 'dPdrho_e',
                          'dPde_rho', 'dTdrho_e', 'dTde_rho', 'Cp', 'Mu', 'Kt'])
    for var in variables[sp.where((variables != thermo1) * (variables != thermo2))]:
        z = getattr(LUT, var)
        interp_func = sp.interpolate.griddata(
            (x, y), z, sp.column_stack((samples_x, samples_y)),
            method=interp_type)
        # Fill any NaNs (query points outside the convex hull of the LUT
        # scatter) with a second nearest-neighbour pass.
        nan_index = sp.where(sp.isnan(interp_func))
        interp_func[nan_index] = sp.interpolate.griddata(
            (x, y), z,
            sp.column_stack((samples_x[nan_index], samples_y[nan_index])),
            method='nearest')
        setattr(self, var, interp_func)
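# The snippet above relies on an idiom worth isolating: scipy.interpolate.griddata
# with method='linear' (or 'cubic') returns NaN for query points outside the
# convex hull of the input scatter, and a second 'nearest' pass backfills only
# those points. A minimal, self-contained sketch of that two-pass pattern
# (all names here are illustrative, not from the original class):
import numpy as np
from scipy.interpolate import griddata

def interp_with_nearest_fallback(x, y, z, qx, qy, method='linear'):
    """Interpolate z(x, y) at (qx, qy); backfill out-of-hull NaNs with 'nearest'."""
    out = griddata((x, y), z, np.column_stack((qx, qy)), method=method)
    nan_idx = np.isnan(out)
    if nan_idx.any():
        out[nan_idx] = griddata((x, y), z,
                                np.column_stack((qx[nan_idx], qy[nan_idx])),
                                method='nearest')
    return out

# Example: the second query point lies outside the sample hull and would be
# NaN without the fallback.
rng = np.random.default_rng(0)
x, y = rng.uniform(0, 1, 200), rng.uniform(0, 1, 200)
z = np.sin(x) * np.cos(y)
qx = np.array([0.5, 1.5])
qy = np.array([0.5, 1.5])
print(interp_with_nearest_fallback(x, y, z, qx, qy))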
def GetFermiSigns(filename, refstate=None, channel=None):
    attr = GetAttr(filename)
    filetype = ''
    try:
        filetype = attr['type']
    except KeyError:
        mo = re.match(r'.*/?[0-9]+-(.*)\.h5', filename)
        filetype = mo.groups()[0]
    L = attr['L']
    if refstate is None:
        refstate = sc.zeros(2 * L * L)
        refstate[0::2] = 1
    if filetype == 'WaveFunction':
        hfile = h5py.File(filename, 'r')
        if 'states_up' in hfile.keys():
            states = sc.column_stack([hfile['states_up'], hfile['states_do']])
        else:
            states = sc.column_stack([hfile['states_0'], hfile['states_1']])
        hfile.close()
        return sf.fermisigns(states, refstate)
    else:
        if channel is None:
            channel = attr['channel']
        L = int(attr['L'])
        if 'phasex' in attr.keys():
            shift = [attr['phasex'] / 2.0, attr['phasey'] / 2.0]
        else:
            shift = [attr['phase_shift_x'] / 2.0, attr['phase_shift_y'] / 2.0]
        q = [float(attr['qx']) / L, float(attr['qy']) / L]
        if channel == 'trans':
            return sf.transfermisigns(L, L, shift, q)
        elif channel == 'long':
            return sf.longfermisigns(L, L, shift, q)
        else:
            raise KeyError('"channel" must be either "trans" or "long".')
def _packData(self, G1, indices2select, effect):
    if effect == 'fixed':
        if G1 is None and self.G0 is None:
            data = self.X[self.data_permutation][indices2select]
        elif G1 is None:
            data = sp.column_stack((self.G0[self.data_permutation][indices2select],
                                    self.X[self.data_permutation][indices2select]))
        elif self.G0 is None:
            data = sp.column_stack((G1[self.data_permutation][indices2select],
                                    self.X[self.data_permutation][indices2select]))
        else:
            data = sp.column_stack((self.G0[self.data_permutation][indices2select],
                                    G1[self.data_permutation][indices2select],
                                    self.X[self.data_permutation][indices2select]))
    elif effect == 'mixed':
        X = self.X[self.data_permutation]
        # Guard against self.G0 being None; the original left G0 undefined in
        # that case and would raise a NameError below.
        G0 = self.G0[self.data_permutation] if self.G0 is not None else None
        if G1 is not None:
            G1 = G1[self.data_permutation]
        data = []
        for i in range(len(indices2select)):
            lis = [X[indices2select[i]]]
            if G0 is not None:
                lis.append(G0[indices2select[i]])
            if G1 is not None:
                lis.append(G1[indices2select[i]])
            data.append(lis)
    else:
        assert False, 'Unknown effect type.'
    return (data, self.Y[self.data_permutation][indices2select])
def bm29write(self, filename=None, newdata=None, comment=True,
              newdatacol_head=None, columns=None):
    """Write a bm29 file from a series of arrays named after the columns
    defined in the input file.

    filename        => string
    newdata         => numpy array
    comment         => boolean, or new comment lines
    newdatacol_head => string, headers of columns
    columns         => list of strings with the columns to write
    N.B. pass either newdata and newdatacol_head, or columns
    """
    if not filename:
        try:
            filename = getattr(self, "fullfilename")
        except AttributeError:
            raise FileFormatError(
                'no filename specified and self.fullfilename not defined')
    if os.path.exists(filename):
        filename += ".1"
    outFile = open(filename, 'w')
    if comment is True:
        if hasattr(self, "comments"):
            outFile.writelines(self.comments[:-2])
    elif comment is False:
        pass
    else:
        outFile.writelines(comment)
    if newdata is None:
        if columns:
            outFile.writelines("#N " + str(len(columns)) + "\n")
            col_head = "#L " + " ".join(columns)
            outFile.writelines(col_head + "\n")
            f = lambda x: getattr(self, x)
            newdata = scipy.column_stack(map(f, columns))
        else:
            if self.All_Column == False:
                outFile.writelines("#N 2\n")
                outFile.writelines("#L E Mu\n")
                newdata = scipy.column_stack((self.E, self.Mu))
            elif self.All_Column == True:
                try:
                    outFile.writelines(self.comments[-2])
                    outFile.write("#L " + " ".join(getattr(self, "col_head")))
                    outFile.write("\n")
                    newdata = self.data
                except AttributeError:
                    pass
    scipy.savetxt(outFile, newdata, fmt='%1.10f')
    outFile.close()  # the original called outFile.close without parentheses
def __init__(self, ionoin, configfile, timein=None, mattype='matrix'):
    """ This will create the RadarSpaceTimeOperator object.
        Inputs
            ionoin - The input ionocontainer. This can be a string naming an
                ionocontainer file, a list of ionocontainer objects, or a list
                of strings naming ionocontainer files.
            configfile - The ini file used to set up the simulation.
            timein - A Ntx2 numpy array of times.
            mattype - The type of matrix to build: 'matrix', 'sim' or 'real'.
    """
    mattype = mattype.lower()
    accepttypes = ['matrix', 'sim', 'real']
    if mattype not in accepttypes:
        raise ValueError('Matrix type can only be {0}'.format(', '.join(accepttypes)))
    d2r = sp.pi / 180.0
    (sensdict, simparams) = readconfigfile(configfile)
    # determine if the input ionocontainer is a string, a list of strings or a
    # list of ionocontainers.
    ionoin = makeionocombined(ionoin)
    # Input location
    self.Cart_Coords_In = ionoin.Cart_Coords
    self.Sphere_Coords_In = ionoin.Sphere_Coords
    # Set the input times
    if timein is None:
        self.Time_In = ionoin.Time_Vector
    else:
        self.Time_In = timein
    # Create an array of output locations based off of the inputs
    rng_vec2 = simparams['Rangegatesfinal']
    nrgout = len(rng_vec2)
    angles = simparams['angles']
    nang = len(angles)
    ang_data = sp.array([[iout[0], iout[1]] for iout in angles])
    rng_all = sp.repeat(rng_vec2, (nang), axis=0)
    ang_all = sp.tile(ang_data, (nrgout, 1))
    self.Sphere_Coords_Out = sp.column_stack((rng_all, ang_all))
    (R_vec, Az_vec, El_vec) = (self.Sphere_Coords_Out[:, 0],
                               self.Sphere_Coords_Out[:, 1],
                               self.Sphere_Coords_Out[:, 2])
    xvecmult = sp.sin(Az_vec * d2r) * sp.cos(El_vec * d2r)
    yvecmult = sp.cos(Az_vec * d2r) * sp.cos(El_vec * d2r)
    zvecmult = sp.sin(El_vec * d2r)
    X_vec = R_vec * xvecmult
    Y_vec = R_vec * yvecmult
    Z_vec = R_vec * zvecmult
    self.Cart_Coords_Out = sp.column_stack((X_vec, Y_vec, Z_vec))
    self.Time_Out = sp.column_stack((simparams['Timevec'],
                                     simparams['Timevec'] + simparams['Tint'])) + self.Time_In[0, 0]
    self.simparams = simparams
    self.sensdict = sensdict
    self.lagmat = self.simparams['amb_dict']['WttMatrix']
    self.mattype = mattype
    # create the matrix
    (self.RSTMat, self.overlaps, self.blocklocs) = makematPA(
        ionoin.Sphere_Coords, ionoin.Cart_Coords, ionoin.Time_Vector,
        configfile, ionoin.Velocity, mattype)
def makedata(testpath, tint):
    """ This will make the input data for the test case. The data will have cases
        where there will be enhancements in Ne, Ti and Te in one location. Each
        case will have 3 integration periods. The first 3 integration periods will
        be the default set of parameters Ne=1e11 and Te=Ti=2000.
        Inputs
            testpath - Directory that will hold the data.
            tint - The integration time in seconds.
    """
    testpath = Path(testpath).expanduser()
    finalpath = testpath.joinpath('Origparams')
    if not finalpath.is_dir():
        finalpath.mkdir()
    data = sp.array([[1e11, 1100.], [1e11, 2100.]])
    z = (50. + sp.arange(50) * 10.)
    nz = len(z)
    params = sp.tile(data[sp.newaxis, sp.newaxis], (nz, 1, 1, 1))
    epnt = range(20, 22)
    p2 = sp.tile(params, (1, 4, 1, 1))
    # enhancement in Ne
    p2[epnt, 1, :, 0] = 5e11
    # enhancement in Ti
    p2[epnt, 2, 0, 1] = 2200.
    # enhancement in Te
    p2[epnt, 3, 1, 1] = 4200.
    coords = sp.column_stack((sp.ones(nz), sp.ones(nz), z))
    species = ['O+', 'e-']
    times = sp.array([[0, 1e3]])
    times2 = sp.column_stack((sp.arange(0, 4), sp.arange(1, 5))) * 3 * tint
    vel = sp.zeros((nz, 1, 3))
    vel2 = sp.zeros((nz, 4, 3))
    Icontstart = IonoContainer(coordlist=coords, paramlist=params, times=times,
                               sensor_loc=sp.zeros(3), ver=0,
                               coordvecs=['x', 'y', 'z'], paramnames=None,
                               species=species, velocity=vel)
    Icont1 = IonoContainer(coordlist=coords, paramlist=p2, times=times2,
                           sensor_loc=sp.zeros(3), ver=0,
                           coordvecs=['x', 'y', 'z'], paramnames=None,
                           species=species, velocity=vel2)
    finalfile = finalpath.joinpath('0 stats.h5')
    Icont1.saveh5(str(finalfile))
    Icontstart.saveh5(str(testpath.joinpath('startfile.h5')))
def writeCSVOutput(hdf5File=None, cout=None):
    f = h5py.File(hdf5File, 'r')
    csv_filename = os.path.join(cout,
                                f['phenotype_name'].value.replace(" ", "_") + ".csv")
    if "betas" in f.keys():
        csv_header = sp.array(["CHR", "Positions", "P-Value", "TestStatistic",
                               "Q-Value", "Benjamini-Hochberg-P-Value",
                               "Benjamini-Hochberg-Yekutieli-P-Value",
                               "Beta0", "SEBeta0", "Beta1", "SEBeta1",
                               "MAF", "SNP-Hash"])
        csv_matrix = sp.column_stack([sp.array(f["chromosomes"], dtype="S50"),
                                      f['positions'], f['p_values'], f['scores'],
                                      f['q_values'], f['bh_p_values'],
                                      f['bhy_p_values'],
                                      f['betas'][:, 0], f['betas_se'][:, 0],
                                      f['betas'][:, 1], f['betas_se'][:, 1],
                                      f['maf'], f['snp_hash']])
    else:
        csv_header = sp.array(["CHR", "Positions", "P-Value", "TestStatistic",
                               "Q-Value", "Benjamini-Hochberg-P-Value",
                               "Benjamini-Hochberg-Yekutieli-P-Value",
                               "MAF", "SNP-Hash"])
        csv_matrix = sp.column_stack([sp.array(f["chromosomes"], dtype="S50"),
                                      f['positions'], f['p_values'], f['scores'],
                                      f['q_values'], f['bh_p_values'],
                                      f['bhy_p_values'], f['maf'], f['snp_hash']])
    mf = open(csv_filename, 'w')
    mf.write(",".join(str(h) for h in csv_header) + "\n")
    for i in range(csv_matrix.shape[0]):
        mf.write(",".join(str(csv_matrix[i, j])
                          for j in range(csv_matrix.shape[1])) + "\n")
    mf.close()
    f.close()
def create_csv(file_path=None, csv_filename=None):
    f = h5py.File(file_path, 'r')
    csv_filename = os.path.join(csv_filename,
                                f['phenotype_name'].value.replace(" ", "_") + ".csv")
    if "betas" in f.keys():
        csv_header = sp.array(["CHR", "Positions", "P-Value", "TestStatistic",
                               "Q-Value", "Benjamini-Hochberg-P-Value",
                               "Benjamini-Hochberg-Yekutieli-P-Value",
                               "Beta0", "SEBeta0", "Beta1", "SEBeta1",
                               "MAF", "SNP-Hash"])
        csv_matrix = sp.column_stack([sp.array(f["chromosomes"], dtype="S50"),
                                      f['positions'], f['p_values'], f['scores'],
                                      f['q_values'], f['bh_p_values'],
                                      f['bhy_p_values'],
                                      f['betas'][:, 0], f['betas_se'][:, 0],
                                      f['betas'][:, 1], f['betas_se'][:, 1],
                                      f['maf'], f['snp_hash']])
    else:
        csv_header = sp.array(["CHR", "Positions", "P-Value", "TestStatistic",
                               "Q-Value", "Benjamini-Hochberg-P-Value",
                               "Benjamini-Hochberg-Yekutieli-P-Value",
                               "MAF", "SNP-Hash"])
        csv_matrix = sp.column_stack([sp.array(f["chromosomes"], dtype="S50"),
                                      f['positions'], f['p_values'], f['scores'],
                                      f['q_values'], f['bh_p_values'],
                                      f['bhy_p_values'], f['maf'], f['snp_hash']])
    mf = open(csv_filename, 'w')
    mf.write(",".join(str(h) for h in csv_header) + "\n")
    for i in range(csv_matrix.shape[0]):
        mf.write(",".join(str(csv_matrix[i, j])
                          for j in range(csv_matrix.shape[1])) + "\n")
    mf.close()
    f.close()
def pyglowinput(latlonalt=[65.1367, -147.4472, 250.00],
                dn_list=[datetime(2015, 3, 21, 8, 00),
                         datetime(2015, 3, 21, 20, 00)],
                z=None):
    if z is None:
        z = sp.linspace(50., 1000., 200)
    dn_diff = sp.diff(dn_list)
    dn_diff_sec = dn_diff[-1].seconds
    timelist = sp.array([calendar.timegm(i.timetuple()) for i in dn_list])
    time_arr = sp.column_stack((timelist, sp.roll(timelist, -1)))
    time_arr[-1, -1] = time_arr[-1, 0] + dn_diff_sec
    v = []
    coords = sp.column_stack((sp.zeros((len(z), 2), dtype=z.dtype), z))
    all_spec = ['O+', 'NO+', 'O2+', 'H+', 'HE+']
    Param_List = sp.zeros((len(z), len(dn_list), len(all_spec), 2))
    for idn, dn in enumerate(dn_list):
        for iz, zcur in enumerate(z):
            latlonalt[2] = zcur
            pt = Point(dn, *latlonalt)
            pt.run_igrf()
            pt.run_msis()
            pt.run_iri()
            # The zonal wind pt.u and meridional wind pt.v will correspond to
            # x and y even though they are supposed to be east-west and
            # north-south. Pyglow does not seem to have vertical winds.
            v.append([pt.u, pt.v, 0])
            for is1, ispec in enumerate(all_spec):
                Param_List[iz, idn, is1, 0] = pt.ni[ispec] * 1e6
            Param_List[iz, idn, :, 1] = pt.Ti
            Param_List[iz, idn, -1, 0] = pt.ne * 1e6
            Param_List[iz, idn, -1, 1] = pt.Te
    # Drop species whose density is zero everywhere.
    Param_sum = Param_List[:, :, :, 0].sum(0).sum(0)
    spec_keep = Param_sum > 0.
    species = sp.array(all_spec)[spec_keep[:-1]].tolist()
    species.append('e-')
    Param_List[:, :] = Param_List[:, :, spec_keep]
    Iono_out = IonoContainer(coords, Param_List, times=time_arr, species=species)
    return Iono_out
def readMahalih5(filename, des_site):
    """ This function will read the mahali GPS data into a GeoData data structure.
        The user only has to give a filename and name of the desired site.
        Input
            filename - A string that holds the file name.
            des_site - The site name. Should be listed in the h5 file in the table sites.
    """
    h5fn = Path(filename).expanduser()
    with h5py.File(str(h5fn), "r", libver='latest') as f:
        despnts = sp.where(f['data']['site'] == des_site)[0]
        # TODO: hard coded for now
        doy = f['data']['time'][despnts]
        year = 2015 * sp.ones_like(doy, dtype=int)
        TEC = f['data']['los_tec'][despnts]
        nTEC = f['data']['err_los_tec'][despnts]
        vTEC = f['data']['vtec'][despnts]
        az2sat = f['data']['az'][despnts]
        # the original read 'az' here as well, which looks like a copy-paste bug
        el2sat = f['data']['el'][despnts]
        piercelat = f['data']['pplat'][despnts]
        piercelong = f['data']['pplon'][despnts]
        satnum = f['data']['prn'][despnts]
        recBias = f['data']['rec_bias'][despnts]
        nrecBias = f['data']['err_rec_bias'][despnts]
    # Make the integration time on the order of 15 seconds.
    if (year == year[0]).all():
        unixyear = (datetime(year[0], 1, 1, 0, 0, 0, tzinfo=UTC) - EPOCH).total_seconds()
        uttime = unixyear + sp.round_(24 * 3600 * sp.column_stack((doy, doy + 15. / 24. / 3600.)))
    else:
        # Making the difference in time to be a minute
        (y_u, y_iv) = np.unique(year, return_inverse=True)
        unixyearu = sp.array([(datetime(iy, 1, 1, 0, 0, 0, tzinfo=UTC) - EPOCH).total_seconds()
                              for iy in y_u])
        unixyear = unixyearu[y_iv]
        uttime = unixyear + 24 * 3600 * sp.column_stack((doy, doy + 15. / 24. / 3600.))
    data = {'TEC': TEC, 'nTEC': nTEC, 'vTEC': vTEC, 'recBias': recBias,
            'nrecBias': nrecBias, 'satnum': satnum, 'az2sat': az2sat,
            'el2sat': el2sat}
    coordnames = 'WGS84'
    sensorloc = sp.nan * sp.ones(3)
    dataloc = sp.column_stack((piercelat, piercelong,
                               350e3 * sp.ones_like(piercelat)))
    return (data, coordnames, dataloc, sensorloc, uttime)
def makedata(testpath):
    """ This will make the input data for the test case. The data will have the
        default set of parameters Ne=1e11 and Te=Ti=2000.
        Inputs
            testpath - Directory that will hold the data.
    """
    finalpath = testpath.joinpath('Origparams')
    if not finalpath.exists():
        finalpath.mkdir()
    data = SIMVALUES
    z = sp.linspace(50., 1e3, 50)
    nz = len(z)
    params = sp.tile(data[sp.newaxis, sp.newaxis, :, :], (nz, 1, 1, 1))
    coords = sp.column_stack((sp.ones(nz), sp.ones(nz), z))
    species = ['O+', 'e-']
    times = sp.array([[0, 1e9]])
    vel = sp.zeros((nz, 1, 3))
    Icont1 = IonoContainer(coordlist=coords, paramlist=params, times=times,
                           sensor_loc=sp.zeros(3), ver=0,
                           coordvecs=['x', 'y', 'z'], paramnames=None,
                           species=species, velocity=vel)
    finalfile = finalpath.joinpath('0 stats.h5')
    Icont1.saveh5(str(finalfile))
    # set start temp to 1000 K.
    Icont1.Param_List[:, :, :, 1] = 1e3
    Icont1.saveh5(str(testpath.joinpath('startfile.h5')))
def main():
    saved_handler = sp.seterrcall(err_handler)
    saved_err = sp.seterr(all='call')
    print('============ Part 1: Plotting =============================')
    x, y = load_data('ex2/ex2data1.txt')
    plot_data(x, y)
    pl.show()
    print('============ Part 2: Compute Cost and Gradient ============')
    m, n = x.shape
    x = sp.column_stack((sp.ones((m, 1)), x))
    init_theta = sp.asmatrix(sp.zeros((n + 1, 1)))
    cost, grad = cost_function(init_theta, x, y)
    print('Cost at initial theta: %s' % cost)
    print('Gradient at initial theta:\n %s' % grad)
    print('============ Part 3: Optimizing minimize ====================')
    # res = op.minimize(cost_function, init_theta, args=(x, y), jac=True, method='Newton-CG')
    res = op.minimize(cost_function_without_grad, init_theta, args=(x, y),
                      method='Powell')
    # print('Cost at theta found by fmin: %s' % cost)
    print('Result by minimize:\n%s' % res)
    plot_decision_boundary(res.x, x, y)
    pl.show()
    print('============ Part 4: Optimizing fmin ====================')
    res = op.fmin(cost_function_without_grad, init_theta, args=(x, y))
    # print('Cost at theta found by fmin: %s' % cost)
    print('Result by fmin:\n%s' % res)
    plot_decision_boundary(res, x, y)
    pl.show()
    sp.seterrcall(saved_handler)
    sp.seterr(**saved_err)
def apply_flow(self, flowrate):
    r'''
    Convert the invaded sequence into an invaded time for a given flow rate
    considering the volume of invaded pores and throats.

    Parameters
    ----------
    flowrate : float
        The flow rate of the injected fluid

    Returns
    -------
    Creates a throat array called 'invasion_time' in the Algorithm dictionary
    '''
    P12 = self._net['throat.conns']                # List of throat conns
    a = self['throat.invasion_sequence']           # Invasion sequence
    b = sp.argsort(self['throat.invasion_sequence'])
    P12_inv = self['pore.invasion_sequence'][P12]  # Pore invasion sequence
    # Find if the connected pores were invaded with or before each throat
    P1_inv = P12_inv[:, 0] == a
    P2_inv = P12_inv[:, 1] == a
    c = sp.column_stack((P1_inv, P2_inv))
    d = sp.sum(c, axis=1, dtype=bool)  # List of pores invaded with each throat
    # Find volume of these pores
    P12_vol = sp.zeros((self.Nt,))
    P12_vol[d] = self._net['pore.volume'][P12[c]]
    # Add invaded throat volume to pore volume (if invaded)
    T_vol = P12_vol + self._net['throat.volume']
    # Cumulative sum on the sorted throats gives cumulated injected volume
    e = sp.cumsum(T_vol[b] / flowrate)
    t = sp.zeros((self.Nt,))
    t[b] = e  # Convert back to original order
    self._phase['throat.invasion_time'] = t
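# A minimal standalone sketch (with made-up data, not part of the algorithm
# above) of the idiom apply_flow relies on: sort by invasion sequence, take a
# cumulative sum of volume/flowrate, then scatter the times back into the
# original throat order.
import numpy as np

flowrate = 2.0                        # volume per unit time
inv_seq = np.array([2, 0, 3, 1])      # invasion order of 4 throats
vol = np.array([1.0, 4.0, 2.0, 3.0])  # invaded volume per throat
b = np.argsort(inv_seq)               # throats listed in invasion order
t = np.zeros_like(vol)
t[b] = np.cumsum(vol[b] / flowrate)   # fill times, back in original order
print(t)                              # the first-invaded throat gets the earliest time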
def makeinputh5(Iono, basedir):
    basedir = Path(basedir).expanduser()
    Param_List = Iono.Param_List
    dataloc = Iono.Cart_Coords
    times = Iono.Time_Vector
    velocity = Iono.Velocity
    zlist, idx = sp.unique(dataloc[:, 2], return_inverse=True)
    siz = list(Param_List.shape[1:])
    vsiz = list(velocity.shape[1:])
    datalocsave = sp.column_stack((sp.zeros_like(zlist), sp.zeros_like(zlist), zlist))
    outdata = sp.zeros([len(zlist)] + siz)
    outvel = sp.zeros([len(zlist)] + vsiz)
    for izn, iz in enumerate(zlist):
        arr = sp.argwhere(idx == izn)
        outdata[izn] = sp.mean(Param_List[arr], axis=0)
        outvel[izn] = sp.mean(velocity[arr], axis=0)
    Ionoout = IonoContainer(datalocsave, outdata, times, Iono.Sensor_loc, ver=0,
                            paramnames=Iono.Param_Names, species=Iono.Species,
                            velocity=outvel)
    ofn = basedir / 'startdata.h5'
    print('writing {}'.format(ofn))
    Ionoout.saveh5(str(ofn))
def train(self):
    if self.__algo_model == "MWUrt" or self.__algo_model == "WCrt":
        data = asso.MatrixData(x=self.__x, y=self.__y, covariates=self.__cov)
        self.__ass.setData(data)
        self.__ass.train()
    else:
        self.__ass.setPhenotype(self.__y)
        self.__ass.setGenotype(self.__x)
        if self.__cov is not None:
            self.__ass.setCovariates(
                sp.column_stack([sp.ones(self.__y.shape), self.__cov]))
        if self.__permutation == True:
            # Scale the number of permutations inversely with the number of SNPs.
            if self.__x.shape[1] < 1000:
                self.__perms = 1000000
            elif self.__x.shape[1] < 10000:
                self.__perms = 100000
            elif self.__x.shape[1] < 100000:
                self.__perms = 10000
            elif self.__x.shape[1] < 1000000:
                self.__perms = 1000
            elif self.__x.shape[1] < 10000000:
                self.__perms = 100
            else:
                self.__perms = 10
            print("SNPS: ", self.__x.shape[1])
            print("Perms: ", self.__perms)
            self.__ass.permutations(self.__perms)
        else:
            self.__ass.test_associations()
def backsolve(self, T=None, vterm=None):
    """Solve finite system by backward recursion

    Parameters
    -------------
    T : int, optional
        Number of periods of time.
    vterm : array, optional
        Terminal value function.

    Returns
    ----------
    X : array, shape (n, T)
        Optimal controls. An optimal policy for each starting state
    V : array, shape (n, T + 1)
        Value function.
    """
    if T is None:
        if self.T is not None:
            T = self.T
        else:
            print("Not a finite time model")
            return
    # The original overwrote a user-supplied vterm with self.vterm; keep a
    # passed-in vterm, fall back to self.vterm, then to zeros.
    if vterm is None:
        vterm = self.vterm if self.vterm is not None else sp.zeros(self.n)
    x = sp.zeros((self.n, T), dtype=int)
    v = sp.column_stack((sp.zeros((self.n, T)), vterm))
    pstar = sp.zeros((self.n, self.n, T))
    for t in sp.arange(T - 1, -1, -1):
        v[:, t], x[:, t] = self.valmax(v[:, t + 1])
        pstar[..., t] = self.valpol(x[:, t])[0]
    return (x, v, pstar)
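# A self-contained sketch of the backward recursion the method implements, on
# a toy finite-horizon problem (all arrays below are made up for illustration):
# v_t(s) = max_a [ R(s, a) + sum_s' P(s'|s, a) v_{t+1}(s') ].
import numpy as np

n, na, T = 3, 2, 5
rng = np.random.default_rng(0)
R = rng.random((n, na))              # reward for each state/action
P = rng.random((na, n, n))
P /= P.sum(axis=2, keepdims=True)    # row-stochastic transitions per action
v = np.zeros((n, T + 1))             # terminal value v[:, T] = 0
x = np.zeros((n, T), dtype=int)
for t in range(T - 1, -1, -1):
    q = R + np.einsum('aij,j->ia', P, v[:, t + 1])  # action values, shape (n, na)
    x[:, t] = q.argmax(axis=1)
    v[:, t] = q.max(axis=1)
print(x[:, 0])  # optimal first-period action per state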
def kmeanspp_initialisation(self, X):
    """Initialise means using K-Means++"""
    # assumes `ones` is in scope, e.g. from scipy import ones
    N, _ = X.shape
    k, d = self.k, self.d
    M = []
    # Choose one center amongst the X at random
    m = sc.random.randint(N)
    M.append(X[m])
    # Choose k centers
    while len(M) < self.k:
        # Create a probability distribution D^2 from the previous means
        D = cdist(X, M).min(1) ** 2
        assert D.shape == (N,)
        # Normalise and sample a new point
        D /= D.sum()
        m = sc.random.multinomial(1, D).argmax()
        M.append(X[m])
    M = sc.column_stack(M)
    sigma = sc.sqrt(cdist(X, M.T, 'sqeuclidean').sum(0) / N)
    w = ones(k) / float(k)
    return M, sigma, w
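# A free-standing version of the same K-Means++ seeding rule, runnable on its
# own with NumPy and SciPy (a sketch of the technique, not the class above):
import numpy as np
from scipy.spatial.distance import cdist

def kmeanspp_seeds(X, k, rng=None):
    """Pick k rows of X as initial centers, each sampled proportionally to D^2."""
    rng = np.random.default_rng(rng)
    centers = [X[rng.integers(len(X))]]
    while len(centers) < k:
        d2 = cdist(X, np.asarray(centers)).min(axis=1) ** 2
        p = d2 / d2.sum()                       # the D^2 weighting
        centers.append(X[rng.choice(len(X), p=p)])
    return np.asarray(centers)

X = np.random.default_rng(0).normal(size=(100, 2))
print(kmeanspp_seeds(X, 3))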
def PathSPCA(A, k):
    # assumes numpy-style names in scope, e.g.
    #   from numpy import array, real, dot, append, column_stack, row_stack
    #   import numpy.linalg as la
    M, N = A.shape
    # Start from the variable with the largest marginal variance
    As = (A * A).sum(axis=0)
    vmax = As.max()
    vp = As.argmax()
    subset = [vp]
    vars = []
    res = subset  # res aliases subset, so it ends up holding all selected indices
    rhos = [(A[:, vp] * A[:, vp]).sum()]
    Stemp = array([rhos])
    for i in range(1, k):
        lev, v = la.eig(Stemp)
        vars.append(real(lev).max())
        vp = real(lev).argmax()
        x = dot(A[:, subset], v[:, vp])
        x = x / la.norm(x)
        seto = list(range(0, N))
        for j in subset:
            seto.remove(j)
        vals = dot(x.T, A[:, seto])
        vals = vals * vals
        rhos.append(vals.max())
        vpo = seto[vals.argmax()]
        # Grow the Gram matrix of the selected subset by one row and column
        Stemp = column_stack((Stemp, dot(A[:, subset].T, A[:, vpo])))
        vbuf = append(dot(A[:, vpo].T, A[:, subset]),
                      array([(A[:, vpo] * A[:, vpo]).sum()]))
        Stemp = row_stack((Stemp, vbuf))
        subset.append(vpo)
    lev, v = la.eig(Stemp)
    vars.append(real(lev).max())
    return vars, res, rhos
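# A hypothetical usage sketch for PathSPCA (data and k are made up, and the
# numpy names used inside PathSPCA are assumed to be in scope): greedily
# select 3 of 10 variables for a sparse principal component.
import numpy as np

rng = np.random.default_rng(0)
A = rng.normal(size=(50, 10))          # 50 samples, 10 variables
variances, selected, rhos = PathSPCA(A, 3)
print(selected)                        # indices of the chosen variables
print(variances)                       # explained variance at each step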
def generate_data_case(sigma, beta, rho, N, t_delta, x, y, z):
    """
    Generate output data for a given case for later processing. The output is
    a 2D array containing all the states and the conditions used to generate
    them as columns for the given case. This will simplify the processing and
    plotting at a later stage.

    Input:
    sigma:   Integer for the sigma attractor parameter
    beta:    Float64 for the beta attractor parameter
    rho:     Integer for the rho attractor parameter
    N:       Integer for the total number of steps of the solver
    t_delta: Float64 for the step size of the solver
    x:       Scipy array for the x-positions
    y:       Scipy array for the y-positions
    z:       Scipy array for the z-positions
    """
    S = sigma * sp.ones([N, 1])
    B = beta * sp.ones([N, 1])
    R = rho * sp.ones([N, 1])
    Na = N * sp.ones([N, 1])
    T = t_delta * sp.ones([N, 1])
    data = sp.column_stack((S, B, R, Na, T, x, y, z))
    return data
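# Hypothetical usage (dummy zero trajectories, and `sp` assumed imported as in
# the snippet): pack a 100-step run with its conditions; each row is
# (sigma, beta, rho, N, dt, x, y, z).
import numpy as np

N = 100
x = y = z = np.zeros(N)
table = generate_data_case(10, 8.0 / 3.0, 28, N, 0.01, x, y, z)
print(table.shape)  # (100, 8)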
def get_data():
    # data is a 2d array which now contains the data
    data = sp.genfromtxt(os.path.join(DATA_DIR, "visitors_per_hour.txt"),
                         delimiter="\t")
    temp = data[0:740 * 24, 1]
    temp_1 = data[0:740 * 24, 0]
    day = sp.arange(1, 366, 1)
    # print(day)
    hits = data[0:731, 1]
    # print(temp)
    # Cleaning the data, i.e. removing NAN values if present
    temp = temp[~sp.isnan(temp)]
    # First differences of the hourly counter values
    for i in range(1, len(temp)):
        temp_1[i - 1] = temp[i] - temp[i - 1]
    # print(temp_1)
    # Sum the 24 hourly differences of each day into a daily total
    for i in range(0, 731):
        x = 0
        for j in range(0, 24):
            x += temp_1[i * 24 + j]
        hits[i] = x
    sp.savetxt(os.path.join(DATA_DIR, "visitors_per_day_train.txt"),
               sp.column_stack([day.astype(int), hits[0:365].astype(int)]),
               fmt='%.18g', delimiter=' ')
    return [day, hits]
def makeinputh5(Iono, basedir):
    """This will make a h5 file for the IonoContainer that can be used as starting
    points for the fitter. The ionocontainer taken will be averaged over the x and
    y dimensions of space to make an average value of the parameters for each
    altitude.
    Inputs
        Iono - An instance of the Ionocontainer class that will be averaged over
            so it can be used for fitter starting points.
        basedir - A string that holds the directory that the file will be saved to.
    """
    # Get the parameters from the original data
    Param_List = Iono.Param_List
    dataloc = Iono.Cart_Coords
    times = Iono.Time_Vector
    velocity = Iono.Velocity
    zlist, idx = sp.unique(dataloc[:, 2], return_inverse=True)
    siz = list(Param_List.shape[1:])
    vsiz = list(velocity.shape[1:])
    datalocsave = sp.column_stack((sp.zeros_like(zlist), sp.zeros_like(zlist), zlist))
    outdata = sp.zeros([len(zlist)] + siz)
    outvel = sp.zeros([len(zlist)] + vsiz)
    # Do the averaging across space
    for izn, iz in enumerate(zlist):
        arr = sp.argwhere(idx == izn)
        outdata[izn] = sp.mean(Param_List[arr], axis=0)
        outvel[izn] = sp.mean(velocity[arr], axis=0)
    Ionoout = IonoContainer(datalocsave, outdata, times, Iono.Sensor_loc, ver=0,
                            paramnames=Iono.Param_Names, species=Iono.Species,
                            velocity=outvel)
    Ionoout.saveh5(os.path.join(basedir, 'startdata.h5'))
def run(self):
    # Parameters passed are current data array, along with time step
    # between current data points
    self.times = sp.arange(0, self.Tfinal, self.dt)
    self.sim = odeint(self.eqns, self.init, self.times, (self.inj, self.injdt))
    sp.savetxt('simulation.txt', sp.column_stack((self.times, self.sim)))
def recover_topics(P, T, k, a0):
    """Recover the k components given input Pairs and Triples and $\\alpha_0$"""
    # Consider the rank-k approximation of P
    P = approxk(P, k)
    # Get the whitening and coloring matrices
    W, Wt = get_whitener(P, k)
    # Whiten the third moment
    Tw = lambda theta: W.T.dot(T(W.dot(theta))).dot(W)
    # Project Tw onto a matrix
    theta = orthogonal(k).T[0]
    U, S, _ = svd(Tw(theta))
    assert (S > 1e-10).all()  # make sure it is non-singular
    O = []
    for i in range(k):
        v = U.T[i]
        Zinv = (a0 + 2) / 2 * (v.T.dot(Tw(v)).dot(v))
        O.append(Zinv * Wt.T.dot(v))
    O = sc.column_stack(O)
    return abs(O)
def historial():
    """Plot the system energies over the time span specified by the user."""
    global EnergiaK, EnergiaP, EnergiaT
    t = dt * np.arange(npasos_temporales + 1)
    plt.figure('Energias del sistema')
    plt.title('Energies')
    plt.plot(t, EnergiaP, 'b', label='Potential')
    plt.plot(t, EnergiaK, 'r', label='Kinetic')
    plt.plot(t, EnergiaT, 'black', label='Total')
    plt.xlabel('t', fontsize=18)
    plt.xticks(np.linspace(0, 14, 6), fontsize=18)
    plt.yticks(np.linspace(0, 35e-7, 6), fontsize=18)
    plt.ylim(0, 40e-7)
    plt.xlim(0, 14)
    plt.legend(loc=1)
    plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
    plt.figure('Potential Energy')
    plt.plot(t, EnergiaP, 'b')
    plt.xlabel('t', fontsize=18)
    plt.ylabel('Ex Energy', fontsize=18)
    plt.xticks(np.linspace(0, 100, 11), fontsize=18)
    plt.yticks(np.linspace(0, 16, 8), fontsize=18)
    plt.xlim(0, 100)
    plt.ylim(0, 25)
    if os.path.exists("Energias") and os.path.isfile("Energias/Energias.png"):
        os.remove("Energias/Energias.png")
        plt.savefig('Energias.png', dpi=720)
        shutil.move('Energias.png', "Energias")
        os.remove("Energias/energies.out")
        # Write and save the file with the energy values over time:
        sp.savetxt('energies.out',
                   sp.column_stack((t, EnergiaP, EnergiaK, EnergiaT)),
                   fmt=('%1.4e', '%1.4e', '%1.4e', '%1.4e'))
        shutil.move('energies.out', "Energias")
    else:
        os.mkdir("Energias")
        plt.savefig('Energias.png', dpi=720)
        shutil.move('Energias.png', "Energias")
        # Write and save the file with the energy values over time:
        sp.savetxt('energies.out',
                   sp.column_stack((t, EnergiaP, EnergiaK, EnergiaT)),
                   fmt=('%1.4e', '%1.4e', '%1.4e', '%1.4e'))
        shutil.move('energies.out', "Energias")
def analysisdump(maindir, configfile, suptitle=None):
    """ This function will perform all of the plotting functions in this module
        given the main directory that all of the files live.
        Inputs
            maindir - The directory for the simulation.
            configfile - The name of the configuration file used.
            suptitle - The supertitle used on the files.
    """
    maindir = Path(maindir)
    plotdir = maindir.joinpath("AnalysisPlots")
    if not plotdir.is_dir():
        plotdir.mkdir()
    # plot spectrums
    filetemplate1 = str(maindir.joinpath("AnalysisPlots", "Spec"))
    filetemplate3 = str(maindir.joinpath("AnalysisPlots", "ACF"))
    filetemplate4 = str(maindir.joinpath("AnalysisPlots", "AltvTime"))
    (sensdict, simparams) = readconfigfile(configfile)
    angles = simparams["angles"]
    ang_data = sp.array([[iout[0], iout[1]] for iout in angles])
    if not sensdict["Name"].lower() in ["risr", "pfisr"]:
        ang_data_temp = ang_data.copy()
        beamlistlist = sp.array(simparams["outangles"]).astype(int)
        ang_data = sp.array([ang_data_temp[i].mean(axis=0) for i in beamlistlist])
    zenang = ang_data[sp.argmax(ang_data[:, 1])]
    rnggates = simparams["Rangegatesfinal"]
    rngchoices = sp.linspace(sp.amin(rnggates), sp.amax(rnggates), 4)
    angtile = sp.tile(zenang, (len(rngchoices), 1))
    coords = sp.column_stack((sp.transpose(rngchoices), angtile))
    times = simparams["Timevec"]
    filetemplate2 = str(maindir.joinpath("AnalysisPlots", "Params"))
    if simparams["Pulsetype"].lower() == "barker":
        params = ["Ne"]
        if suptitle is None:
            plotbeamparametersv2(times, configfile, maindir, params=params,
                                 filetemplate=filetemplate2, werrors=True)
        else:
            plotbeamparametersv2(times, configfile, maindir, params=params,
                                 filetemplate=filetemplate2, suptitle=suptitle,
                                 werrors=True)
    else:
        params = ["Ne", "Nepow", "Te", "Ti", "Vi"]
        if suptitle is None:
            plotspecs(coords, times, configfile, maindir, cartcoordsys=False,
                      filetemplate=filetemplate1)
            plotacfs(coords, times, configfile, maindir, cartcoordsys=False,
                     filetemplate=filetemplate3)
            plotbeamparametersv2(times, configfile, maindir, params=params,
                                 filetemplate=filetemplate2, werrors=True)
            beamvstime(configfile, maindir, params=params,
                       filetemplate=filetemplate4)
        else:
            plotspecs(coords, times, configfile, maindir, cartcoordsys=False,
                      filetemplate=filetemplate1, suptitle=suptitle)
            plotacfs(coords, times, configfile, maindir, cartcoordsys=False,
                     filetemplate=filetemplate3, suptitle=suptitle)
            plotbeamparametersv2(times, configfile, maindir, params=params,
                                 filetemplate=filetemplate2, suptitle=suptitle,
                                 werrors=True)
            beamvstime(configfile, maindir, params=params,
                       filetemplate=filetemplate4, suptitle=suptitle)
def getColourFeatures(df):
    BV = sp.array(df["BV"].tolist())
    BR = sp.array(df["BR"].tolist())
    BI = sp.array(df["BI"].tolist())
    VR = sp.array(df["VR"].tolist())
    VI = sp.array(df["VI"].tolist())
    RI = sp.array(df["RI"].tolist())
    return sp.column_stack((BV, BR, BI, VR, VI, RI))
def generate_Custom(cleaned, file_name, line_count, state_th, state_tth,
                    state_chi, state_phi, state_x, state_y, state_temp,
                    start, stop):
    # assumes numpy-style names in scope, e.g.
    #   from numpy import shape, column_stack, savetxt
    line_count = int(line_count)
    start = int(start)
    stop = int(stop)
    temperature = cleaned[:, 0] - 273
    chi = cleaned[:, 1]
    phi = cleaned[:, 2]
    tx = cleaned[:, 3]
    ty = cleaned[:, 4]
    om = cleaned[:, 5]
    tth = cleaned[:, 7]
    angle = cleaned[:, 8]
    intensity = cleaned[:, 9]
    if (start != 0) or (stop != 0):
        temperature = crop_data(temperature, line_count, start, stop, 0, 0)
        chi = crop_data(chi, line_count, start, stop, 0, 0)
        phi = crop_data(phi, line_count, start, stop, 0, 0)
        tx = crop_data(tx, line_count, start, stop, 0, 0)
        ty = crop_data(ty, line_count, start, stop, 0, 0)
        om = crop_data(om, line_count, start, stop, 0, 0)
        tth = crop_data(tth, line_count, start, stop, 0, 0)
        angle = crop_data(angle, line_count, start, stop, 0, 0)
        intensity = crop_data(intensity, line_count, start, stop, 0, 0)
        line_count = line_count - start - stop
    # Keep one value per scan for the motor positions, and the angle axis of a
    # single scan.
    temperature = temperature[::line_count]
    chi = chi[::line_count]
    phi = phi[::line_count]
    tx = tx[::line_count]
    ty = ty[::line_count]
    om = om[::line_count]
    tth = tth[::line_count]
    angle = angle[:line_count]
    int_matrix = intensity.reshape(int(len(intensity) / line_count), line_count)
    for i in range(shape(int_matrix)[0]):
        out_scan = column_stack((angle, int_matrix[i, :]))
        name = file_name + "_Scan" + str(i)
        if state_temp == 1:
            name += " T= " + str(round(temperature[i], 2))
        if state_chi == 1:
            name += " CHI= " + str(round(chi[i], 2))
        if state_phi == 1:
            name += " PHI= " + str(round(phi[i], 2))
        if state_x == 1:
            name += " X= " + str(round(tx[i], 2))
        if state_y == 1:
            name += " Y= " + str(round(ty[i], 2))
        if state_th == 1:
            name += " TH= " + str(round(om[i], 3))
        if state_tth == 1:
            name += " TTH= " + str(round(tth[i], 3))
        savetxt(name + ".txt", out_scan, fmt='%10.8f')
    return 1
def SRIparams2iono(filename):
    fullfile = h5file(filename)
    fullfiledict = fullfile.readWholeh5file()
    # Size = Nrecords x Nbeams x Nranges x Nions+1 x 4
    # (fraction, temperature, collision frequency, LOS speed)
    fits = fullfiledict['/FittedParams']['Fits']
    (nt, nbeams, nrng, nspecs, nstuff) = fits.shape
    nlocs = nbeams * nrng
    fits = fits.transpose((1, 2, 0, 3, 4))
    fits = fits.reshape((nlocs, nt, nspecs, nstuff))
    # Nrecords x Nbeams x Nranges
    Ne = fullfiledict['/FittedParams']['Ne']
    Ne = Ne.transpose((1, 2, 0))
    Ne = Ne.reshape((nlocs, nt))
    param_lists = sp.zeros((nlocs, nt, nspecs, 2))
    param_lists[:, :, :, 0] = fits[:, :, :, 0]
    param_lists[:, :, :, 1] = fits[:, :, :, 1]
    param_lists[:, :, -1, 0] = Ne
    Velocity = fits[:, :, 0, 3]
    if fullfiledict['/FittedParams']['IonMass'] == 16:
        species = ['O+', 'e-']
        pnames = sp.array([['Ni', 'Ti'], ['Ne', 'Te']])
    time = fullfiledict['/Time']['UnixTime']
    rng = fullfiledict['/FittedParams']['Range']
    bco = fullfiledict['/']['BeamCodes']
    angles = bco[:, 1:3]
    (nang, nrg) = rng.shape
    allang = sp.tile(angles[:, sp.newaxis], (1, nrg, 1))
    all_loc = sp.column_stack((rng.flatten(), allang.reshape(nang * nrg, 2)))
    lkeep = ~sp.any(sp.isnan(all_loc), 1)
    all_loc = all_loc[lkeep]
    Velocity = Velocity[lkeep]
    param_lists = param_lists[lkeep]
    all_loc[:, 0] = all_loc[:, 0] * 1e-3
    iono1 = IonoContainer(all_loc, param_lists, times=time, ver=1,
                          coordvecs=['r', 'theta', 'phi'],
                          paramnames=pnames, species=species,
                          velocity=Velocity)
    # MSIS
    tn = fullfiledict['/MSIS']['Tn']
    tn = tn.transpose((1, 2, 0))
    tn = tn.reshape((nlocs, nt))
    startparams = sp.ones((nlocs, nt, 2, 2))
    startparams[:, :, 0, 1] = tn
    startparams[:, :, 1, 1] = tn
    startparams = startparams[lkeep]
    ionoS = IonoContainer(all_loc, startparams, times=time, ver=1,
                          coordvecs=['r', 'theta', 'phi'],
                          paramnames=pnames, species=species)
    return iono1, ionoS
def count_feature(X, tbl_lst=None, min_cnt=1):
    X_lst = [pd.Series(X[:, i]) for i in range(X.shape[1])]
    if tbl_lst is None:
        tbl_lst = [x.value_counts() for x in X_lst]
    if min_cnt > 1:
        tbl_lst = [s[s >= min_cnt] for s in tbl_lst]
    X = sp.column_stack([x.map(tbl).values for x, tbl in zip(X_lst, tbl_lst)])
    # NA (unseen values) to 0
    return np.nan_to_num(X), tbl_lst
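# Hypothetical usage (the snippet's own imports pd/sp/np are assumed in
# scope): fit count tables on training data, then reuse them on a test set so
# both get identical encodings, with unseen categories mapping to 0.
import numpy as np

X_train = np.array([['a', 'x'], ['a', 'y'], ['b', 'x']], dtype=object)
X_test = np.array([['a', 'z'], ['c', 'x']], dtype=object)
X_train_enc, tbls = count_feature(X_train)
X_test_enc, _ = count_feature(X_test, tbl_lst=tbls)
print(X_train_enc)  # each value replaced by its training-set frequency
print(X_test_enc)   # 'z' and 'c' were never seen, so they encode to 0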
def planeFromPoints(points):
    """
    Fit a plane to N points by least squares and return the coefficients
    (a, b, c) of the form: Z = aX + bY + c
    Follows logic from
    http://www.velocityreviews.com/forums/t368189-re-linear-regression-in-3-dimensions.html
    """
    # assumes numpy-style names in scope, e.g.
    #   from numpy import column_stack, ones_like
    #   from numpy.linalg import lstsq
    x, y, z = zip(*points)
    A = column_stack([x, y, ones_like(x)])
    abc, residuals, rank, s = lstsq(A, z)
    return abc
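# Hypothetical usage: recover the plane z = 2x - y + 3 from noiseless samples
# (again assuming the numpy names used by planeFromPoints are in scope).
pts = [(x, y, 2 * x - y + 3) for x in range(3) for y in range(3)]
print(planeFromPoints(pts))  # approximately [ 2. -1.  3.]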
def centre_of_mass(geometry, vertices='throat.offset_vertices', **kwargs):
    r"""
    Calculate the centre of mass of the throat from the voronoi vertices.
    """
    Nt = geometry.num_throats()
    outer_verts = geometry['throat.vertices']
    offset_verts = geometry[vertices]
    normal = geometry['throat.normal']
    z_axis = [0, 0, 1]
    value = _sp.ndarray([Nt, 3])
    for i in range(Nt):
        if len(offset_verts[i]) > 2:
            verts = offset_verts[i]
        elif len(outer_verts[i]) > 2:
            verts = outer_verts[i]
        else:
            verts = []
        if len(verts) > 0:
            # For boundaries some facets will already be aligned with the
            # axis - if this is the case a rotation is unnecessary and could
            # also cause problems
            angle = tr.angle_between_vectors(normal[i], z_axis)
            if angle == 0.0 or angle == _sp.pi:
                # We are already aligned
                rotate_input = False
                facet = verts
            else:
                rotate_input = True
                M = tr.rotation_matrix(
                    tr.angle_between_vectors(normal[i], z_axis),
                    tr.vector_product(normal[i], z_axis))
                facet = _sp.dot(verts, M[:3, :3].T)
            # Now we have a rotated facet aligned with the z axis - make 2D
            facet_2D = _sp.column_stack((facet[:, 0], facet[:, 1]))
            z = _sp.unique(_sp.around(facet[:, 2], 10))
            if len(z) == 1:
                # We need the vertices arranged in order so perform a convex hull
                hull = ConvexHull(facet_2D)
                ordered_facet_2D = facet_2D[hull.vertices]
                # Call the routine to calculate an area weighted centroid from
                # the 2D polygon
                COM_2D = vo.PolyWeightedCentroid2D(ordered_facet_2D)
                COM_3D = _sp.hstack((COM_2D, z))
                # If we performed a rotation we need to rotate back
                if rotate_input:
                    MI = tr.inverse_matrix(M)
                    # Unrotate the offset coordinates using the inverse of the
                    # original rotation matrix
                    value[i] = _sp.dot(COM_3D, MI[:3, :3].T)
                else:
                    value[i] = COM_3D
            else:
                logger.error('Rotation Failed: ' + str(_sp.unique(facet[:, 2])))
    return value
def compare_arrays(a, b, inc_time=False):
    diffAbs = sp.absolute(sp.subtract(a, b))
    diffMax = sp.nanmax(diffAbs)
    # we have to multiply by two as the python epsilon is not correct
    diffEps = (diffMax / sys.float_info.epsilon) * 2
    if inc_time:
        return sp.column_stack((a[:, 0], diffAbs)), diffMax, diffEps
    else:
        return diffAbs, diffMax, diffEps
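# Hypothetical usage (the snippet's sp/sys imports are assumed): measure how
# far two nearly identical arrays are apart, both as an absolute difference
# and in units of machine epsilon.
import numpy as np

a = np.array([1.0, 2.0, 3.0])
b = a + 1e-15
diffs, worst, eps_units = compare_arrays(a, b)
print(worst, eps_units)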
def nstats(self, upto):
    """Given the n-values array, returns the average and standard deviation of
    the n-values taken at each stage-height for each cross-section.
    self.xs_nvals - array of n-values with shape (x, y) with cross-sections
    along the x axis and stage-heights along the y axis"""
    self.get_xs_n(upto)
    means = scipy.mean(self.xs_nvals, axis=1)
    stdevs = scipy.std(self.xs_nvals, axis=1)
    # if filename:
    #     Write to csv
    return scipy.column_stack((self.handstage[:upto], means, stdevs))
def __init__(self, ionoin, configfile):
    r2d = 180.0 / sp.pi
    d2r = sp.pi / 180.0
    (sensdict, simparams) = readconfigfile(configfile)
    nt = ionoin.Time_Vector.shape[0]
    nloc = ionoin.Sphere_Coords.shape[0]
    # Input location
    self.Cart_Coords_in = ionoin.Cart_Coords
    # the original had a trailing comma here, which wrapped the array in a tuple
    self.Sphere_Coords_In = ionoin.Sphere_Coords
    self.Time_In = ionoin.Time_Vector
    self.Cart_Coords_In_Rep = sp.tile(ionoin.Cart_Coords, (nt, 1))
    self.Sphere_Coords_In_Rep = sp.tile(ionoin.Sphere_Coords, (nt, 1))
    self.Time_In_Rep = sp.repeat(ionoin.Time_Vector, nloc, axis=0)
    # Output locations
    rng_vec2 = simparams['Rangegatesfinal']
    nrgout = len(rng_vec2)
    angles = simparams['angles']
    nang = len(angles)
    ang_data = sp.array([[iout[0], iout[1]] for iout in angles])
    rng_all = sp.tile(rng_vec2, (nang))
    ang_all = sp.repeat(ang_data, nrgout, axis=0)
    nlocout = nang * nrgout
    ntout = len(simparams['Timevec'])
    self.Sphere_Coords_Out = sp.column_stack((rng_all, ang_all))
    (R_vec, Az_vec, El_vec) = (self.Sphere_Coords_Out[:, 0],
                               self.Sphere_Coords_Out[:, 1],
                               self.Sphere_Coords_Out[:, 2])
    xvecmult = sp.cos(Az_vec * d2r) * sp.cos(El_vec * d2r)
    yvecmult = sp.sin(Az_vec * d2r) * sp.cos(El_vec * d2r)
    zvecmult = sp.sin(El_vec * d2r)
    X_vec = R_vec * xvecmult
    Y_vec = R_vec * yvecmult
    Z_vec = R_vec * zvecmult
    self.Cart_Coords_Out = sp.column_stack((X_vec, Y_vec, Z_vec))
    self.Time_Out = simparams['Timevec']
    self.Time_Out_Rep = sp.repeat(simparams['Timevec'], nlocout, axis=0)
    self.Sphere_Coords_Out_Rep = sp.tile(self.Sphere_Coords_Out, (ntout, 1))
    self.RSTMat = makematPA(ionoin.Sphere_Coords, ionoin.Time_Vector)
def run_clock(beta_file):
    """
    Run the clock given the path to a CSV file containing the normalised
    beta values.
    """
    probes = np.load("model/probes.npy")
    newx, samples = read_file(beta_file, probes)
    nbeta = np.load("model/nbeta.npy")
    # Prepend an intercept column of ones before applying the coefficients.
    result = scipy.dot(scipy.column_stack((scipy.ones([newx.shape[0], 1]), newx)),
                       nbeta)[:, 0]
    return scipy.row_stack((samples, result.flatten()))
def makedata(testpath, tint):
    """ This will make the input data for the test case. The data will have cases
        where there will be enhancements in Ne, Ti and Te in one location. Each
        case will have 3 integration periods. The first 3 integration periods will
        be the default set of parameters Ne=1e11 and Te=Ti=2000.
        Inputs
            testpath - Directory that will hold the data.
            tint - The integration time in seconds.
    """
    finalpath = os.path.join(testpath, 'Origparams')
    if not os.path.isdir(finalpath):
        os.mkdir(finalpath)
    z = sp.linspace(50., 750, 150)
    nz = len(z)
    Z_0 = 250.
    H_0 = 30.
    N_0 = 6.5e11
    c1 = Chapmanfunc(z, H_0, Z_0, N_0) + 5e10
    z0 = 50.
    T0 = 600.
    Te, Ti = TempProfile(z, T0, z0)
    params = sp.zeros((nz, 1, 2, 2))
    params[:, 0, 0, 0] = c1
    params[:, 0, 1, 0] = c1
    params[:, 0, 0, 1] = Ti
    params[:, 0, 1, 1] = Te
    coords = sp.column_stack((sp.zeros(nz), sp.zeros(nz), z))
    species = ['O+', 'e-']
    times = sp.array([[0, 1e3]])
    times2 = sp.column_stack((sp.arange(0, 1), sp.arange(1, 2))) * 3 * tint
    vel = sp.zeros((nz, 1, 3))
    vel2 = sp.zeros((nz, 4, 3))
    Icontstart = IonoContainer(coordlist=coords, paramlist=params, times=times,
                               sensor_loc=sp.zeros(3), ver=0,
                               coordvecs=['x', 'y', 'z'], paramnames=None,
                               species=species, velocity=vel)
    Icont1 = IonoContainer(coordlist=coords, paramlist=params, times=times,
                           sensor_loc=sp.zeros(3), ver=0,
                           coordvecs=['x', 'y', 'z'], paramnames=None,
                           species=species, velocity=vel2)
    finalfile = os.path.join(finalpath, '0 stats.h5')
    Icont1.saveh5(finalfile)
    Icontstart.saveh5(os.path.join(testpath, 'startfile.h5'))
def getFeatures(df):
    B = sp.array(df["cB"].tolist())
    R = sp.array(df["cR"].tolist())
    I = sp.array(df["cI"].tolist())
    V = sp.array(df["cV"].tolist())
    Ha = sp.array(df["lHa"].tolist())
    Hb = sp.array(df["lHb"].tolist())
    Hg = sp.array(df["lHg"].tolist())
    totCounts = sp.array(df["cTotal"].tolist())
    randomFeature = sp.random.normal(0.5, 0.2, len(totCounts))
    return sp.column_stack((B - V, B - R, B - I, V - R, V - I, R - I,
                            totCounts, Ha, Hb, Hg, randomFeature))
def generate(k, n, scale=10, prior="uniform"):
    """Generate k topics, each with n words"""
    if prior == "uniform":
        # Each topic is a multinomial generated from a Dirichlet
        topics = sc.column_stack([dirichlet(sc.ones(n) * scale) for i in range(k)])
        # We also draw the weights of each topic
        weights = dirichlet(sc.ones(k) * scale)
        return TopicModel(weights, topics)
    else:
        # TODO: Support the anchor word assumption.
        raise NotImplementedError
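# A self-contained sketch of the same construction using NumPy only; a plain
# tuple stands in for TopicModel, so this is illustrative rather than the
# original API.
import numpy as np

def generate_topics(k, n, scale=10, rng=None):
    rng = np.random.default_rng(rng)
    topics = np.column_stack([rng.dirichlet(np.ones(n) * scale) for _ in range(k)])
    weights = rng.dirichlet(np.ones(k) * scale)
    return weights, topics  # each column topics[:, i] sums to 1

w, T = generate_topics(3, 5, rng=0)
print(T.sum(axis=0))  # ~ [1. 1. 1.]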
def getFeatures(df):
    BV = sp.array(df["BV"].tolist())
    BR = sp.array(df["BR"].tolist())
    BI = sp.array(df["BI"].tolist())
    VR = sp.array(df["VR"].tolist())
    VI = sp.array(df["VI"].tolist())
    RI = sp.array(df["RI"].tolist())
    Ha = sp.array(df["Ha"].tolist())
    Hb = sp.array(df["Hb"].tolist())
    Hg = sp.array(df["Hg"].tolist())
    totCounts = sp.array(df["totalCounts"].tolist())
    # spike = sp.array(df["spike"].tolist())
    randomFeature = sp.random.normal(0.5, 0.2, len(totCounts))
    return sp.column_stack((BV, BR, BI, VR, VI, RI, totCounts,
                            Ha, Hb, Hg, randomFeature))  # , spike, randomFeature
def main(self):
    bins = 50
    halfwidth = 0.5
    histories = 10000
    iterations = 50
    restarts = 5
    geo = Geometry.Geometry(bins, [[-halfwidth, halfwidth]])
    xs = CrossSection.CrossSection(xS=0.5, nu=1.0, xF=0.5, xG=0)
    self.mark = Markov.Markov(geo, xs, histories)
    self.mark.score = self.score
    self.Q = scipy.zeros((bins, 0))
    # Build the response matrix one column at a time by transporting a point
    # source placed in each bin.
    for i in range(bins):
        print("I am at %i" % i)
        point = scipy.zeros((bins))
        point[i] = 1
        pSource = fissionSource.histogramSource(point, geo)
        self.response = fissionBank.fissionBank()
        self.mark.transport(pSource)
        q = fissionSource.histogramSource(self.response, geo)
        q = q * (1.0 / self.mark.histories)
        self.printVector(q)
        self.Q = scipy.column_stack((self.Q, q))
    q = scipy.ones(bins)
    q = q / scipy.linalg.norm(q, 2)
    print("Calling Deterministic Arnoldi")
    adtm = ArnoldiDtm.Arnoldi(self.Q, iterations, restarts)
    eValues, eVectors = adtm.Arnoldi(q)
    print("Eigenvalues: ")
    self.printVector(eValues)
    print("Dominant eigenvector: ")
    self.printVector(eVectors[:, -1])
    print("\nAll eigenvectors: ")
    self.printM(eVectors)
    Chart = Gnuplot.Gnuplot()
    Chart.title("Histories per 'vector': %i, bins = %i" % (histories, bins))
    length = len(eValues) - 1
    for i in range(5):
        # newer Gnuplot.py versions spell the keyword 'with_' since 'with'
        # became a reserved word in Python
        data = Gnuplot.Data(eVectors[:, length - i], with_='lines',
                            title='vector %i' % i)
        Chart.replot(data)
def make2dhist(testpath, xaxis=TE, yaxis=TI, figmplf=None, curax=None):
    """ This will plot a 2-D histogram of two variables.
        Args:
            testpath (obj:`str`): The path where the SimISR data is stored.
            xaxis (obj:`dict`): default TE, Dictionary that holds the parameter
                info along the x axis of the distribution.
            yaxis (obj:`dict`): default TI, Dictionary that holds the parameter
                info along the y axis of the distribution.
            figmplf (obj:`matplotlib figure`): default None, Figure that the
                plot will be placed on.
            curax (obj:`matplotlib axis`): default None, Axis that the plot
                will be made on.
        Returns:
            figmplf (obj:`matplotlib figure`), curax (obj:`matplotlib axis`),
            hist_h (obj:`matplotlib artist`): The figure handle the plot is
            made on, the axis handle the plot is on, and the plot handle itself.
    """
    sns.set_style("whitegrid")
    sns.set_context("notebook")
    params = [xaxis['param'], yaxis['param']]
    datadict, _, _, _ = makehistdata(params, testpath)
    if (figmplf is None) and (curax is None):
        (figmplf, curax) = plt.subplots(1, 1, figsize=(6, 6), facecolor='w')
    b1 = sp.linspace(*xaxis['lims'])
    b2 = sp.linspace(*yaxis['lims'])
    bins = [b1, b2]
    d1 = sp.column_stack((datadict[params[0]], datadict[params[1]]))
    H, xe, ye = sp.histogram2d(d1[:, 0].real, d1[:, 1].real, bins=bins, normed=True)
    hist_h = curax.pcolor(xe[:-1], ye[:-1], sp.transpose(H), cmap='viridis', vmin=0)
    curax.set_xlabel(r'$' + xaxis['paramLT'] + '$')
    curax.set_ylabel(r'$' + yaxis['paramLT'] + '$')
    curax.set_title(r'Joint distributions for $' + xaxis['paramLT'] + '$' +
                    ' and $' + yaxis['paramLT'] + '$')
    plt.colorbar(hist_h, ax=curax, label='Probability', format='%1.1e')
    return (figmplf, curax, hist_h)
def _calc_eVectors(self, Q, V):
    """
    _calc_eVectors will calculate the eigenvectors. Each eigenvector is a
    linear combination of the vectors of Q with the elements of an
    eigenvector of H as the expansion coefficients. _calc_eVectors returns
    a matrix whose columns are the eigenvectors.

    Q: List of orthonormal basis vectors
    V: Matrix of column vectors
    """
    n = len(V)
    m = len(Q[0])
    Vectors = scipy.zeros((m, 0))
    for j in range(n):
        Vector = scipy.zeros(m)
        for i in range(n):
            Vector = Vector + V[i, j] * Q[i]
        Vectors = scipy.column_stack((Vectors, Vector))
    return Vectors
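# The double loop above is just a matrix product: stacking the basis vectors
# Q as columns gives Vectors = Qm @ V. A quick check of that equivalence,
# with made-up Q and V:
import numpy as np

rng = np.random.default_rng(0)
Q = [rng.normal(size=4) for _ in range(3)]   # 3 basis vectors of length 4
V = rng.normal(size=(3, 3))                  # eigenvectors of H as columns
Qm = np.column_stack(Q)
direct = Qm @ V
looped = np.column_stack([sum(V[i, j] * Q[i] for i in range(3)) for j in range(3)])
print(np.allclose(direct, looped))           # True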
def get_logit_endog(true_params, exog, noise_level):
    """
    Gets an endogenous response that is consistent with the true_params,
    perturbed by noise at noise_level.
    """
    N = exog.shape[0]
    # Create the probability of entering the different classes,
    # given exog and true_params
    Xdotparams = sp.dot(exog, true_params)
    # note: 'noise' is computed but never applied to Xdotparams in the original
    noise = noise_level * sp.randn(*Xdotparams.shape)
    eXB = sp.column_stack((sp.ones(len(Xdotparams)), sp.exp(Xdotparams)))
    class_probabilities = eXB / eXB.sum(1)[:, None]
    # Create the endog
    cdf = class_probabilities.cumsum(axis=1)
    endog = sp.zeros(N)
    for i in range(N):
        endog[i] = sp.searchsorted(cdf[i, :], sp.rand())
    return endog
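# Hypothetical usage: simulate a binary logit response for 5 observations and
# 2 regressors. NumPy builds the inputs here; the function itself still
# assumes the old scipy aliases (sp.dot, sp.randn, sp.rand) are in scope.
import numpy as np

rng = np.random.default_rng(0)
exog = rng.normal(size=(5, 2))
true_params = np.array([1.0, -2.0])
y = get_logit_endog(true_params, exog, noise_level=0.0)
print(y)  # entries in {0, 1}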