def test_load_config_file():
    """Read a [database] section from INI text and verify load_config output."""
    ini_text = """
[database]
url = sqlite:///mydb.sqlite
"""
    parser = ConfigParser()
    parser.read_string(ini_text)
    session, tables = util.load_config(parser['database'])
    # The session should be bound to the configured URL, with no extra tables.
    assert str(session.bind.url) == 'sqlite:///mydb.sqlite'
    assert tables == {}
def database_connecting(self):
    """Connect to the database and register association processing parameters.

    Loads the standard CSS tables (site, wfdisc, affiliation) from the
    user-supplied configuration, declares the infrapy-specific FK/FD/ASSOC
    ORM classes, creates the ASSOC tables if absent, then looks up — or
    inserts — the ASSOC_params row matching the current processing
    parameters and stores its ``passocid``.

    :return: True once the connection and parameter lookup succeed
    :rtype: bool
    """
    print('connecting')
    session, tables = load_config(self.db_PARAM)
    self.session = session
    self.Site = tables['site']
    self.Wfdisc = tables['wfdisc']
    self.Affiliation = tables['affiliation']

    class FK_results(schema.fk_results):
        __tablename__ = 'FK_results'

    class FK_params(schema.fk_params):
        __tablename__ = 'FK_params'

    class FD_params(schema.fd_params):
        __tablename__ = 'FD_params'

    self.dict_namefk = {}
    # One fd_results mapping per configured detection-results table name.
    self.fdtables = []
    for fdi in self.fdtables_names:
        self.fdtables.append(
            type(str(fdi), (schema.fd_results,), {'__tablename__': str(fdi)}))

    class ASSOC_params(schema.ASSOC_params):
        __tablename__ = 'ASSOC_params'

    class ASSOC_results(schema.ASSOC_results):
        __tablename__ = self.resultstable

    self.FK_par = FK_params
    self.FK_results = FK_results
    self.FD_par = FD_params
    self.ASSOC_par = ASSOC_params
    self.ASSOC_results = ASSOC_results
    self.ASSOC_par.__table__.create(self.session.bind, checkfirst=True)
    self.ASSOC_results.__table__.create(self.session.bind, checkfirst=True)

    try:
        self.Passoc_Q = self.session.query(self.ASSOC_par)\
            .filter(self.ASSOC_par.beamwidth == self.beamwidth)\
            .filter(self.ASSOC_par.rangemax == self.rangemax)\
            .filter(self.ASSOC_par.clusterthresh == self.clusterthresh)\
            .filter(self.ASSOC_par.trimthresh == self.trimthresh)\
            .filter(self.ASSOC_par.eventdetmin == self.eventdetmin)\
            .filter(self.ASSOC_par.eventarrmin == self.eventarrmin)\
            .filter(self.ASSOC_par.duration == self.duration)\
            .all()
        if len(self.Passoc_Q) > 1:
            print('issue with the database too many parameters entries, there should be just one')
            embed()
        if len(self.Passoc_Q) == 1:
            self.Passoc_Q = self.Passoc_Q[0]
    except Exception as x1:
        print("issue with the table or first assoc entered")
        print(x1)
        embed()
        self.Passoc_Q = []
    # BUG FIX: was `print(Passoc_Q)` (bare name) — raised NameError.
    print(self.Passoc_Q)

    if not self.Passoc_Q:
        print('New process parameters, write process to INFRA_ASSOC_PARAM table')
        # Use the current row count as the new primary id.
        new_row = self.session.query(self.ASSOC_par).count()
        try:
            res = self.ASSOC_par(beamwidth=self.beamwidth,
                                 rangemax=self.rangemax,
                                 clusterthresh=self.clusterthresh,
                                 trimthresh=self.trimthresh,
                                 eventdetmin=self.eventdetmin,
                                 algorithm=self.algorithm,
                                 eventarrmin=self.eventarrmin,
                                 duration=self.duration,
                                 passocid=new_row)
        except Exception as x1:
            print('problem writing to the assoc param file')
            print("Unexpected error:", x1)
            embed()
        self.session.add(res)
        self.session.commit()
        # Re-query so Passoc_Q holds the freshly committed single row.
        self.Passoc_Q = self.session.query(self.ASSOC_par)\
            .filter(self.ASSOC_par.beamwidth == self.beamwidth)\
            .filter(self.ASSOC_par.rangemax == self.rangemax)\
            .filter(self.ASSOC_par.clusterthresh == self.clusterthresh)\
            .filter(self.ASSOC_par.trimthresh == self.trimthresh)\
            .filter(self.ASSOC_par.eventdetmin == self.eventdetmin)\
            .filter(self.ASSOC_par.eventarrmin == self.eventarrmin)\
            .filter(self.ASSOC_par.duration == self.duration)\
            .one()
        self.passocid = self.Passoc_Q.passocid
    else:
        print('process already in table: Assoc params table')
        self.passocid = self.Passoc_Q.passocid
        print(self.Passoc_Q)
    self.db_connected = True
    return self.db_connected
def database_connecting(self):
    """Connect to database and write parameters for FK analysis.

    The system connects to the database, establishes the health of the
    connection, and writes the parameters for processing.

    :return: state of the connection: True (successful) or False (problem)
    :rtype: bool

    Update: Feb 2018
    DB connection established with a user-defined configuration file to
    specify which standard database tables a user is targeting. Pisces
    provides a function that reads the format and returns the necessary
    tables. The configuration file also specifies infrapy-specific database
    tables for fk processing.
    """
    session, tables = load_config(self.db_PARAM)
    self.session = session
    self.Site = tables['site']
    self.Wfdisc = tables['wfdisc']

    class Fk_results(schema.fk_results):
        __tablename__ = self.resultstable

    class Fk_params(schema.fk_params):
        __tablename__ = 'FK_PARAMS'

    self.FK_par = Fk_params
    self.FK_results = Fk_results
    self.FK_par.__table__.create(self.session.bind, checkfirst=True)
    self.FK_results.__table__.create(self.session.bind, checkfirst=True)

    try:
        # NOTE: the original query filtered on `name` twice; the redundant
        # duplicate filter has been removed (identical predicate, same rows).
        self.FK_parQuery = self.session.query(self.FK_par)\
            .filter(self.FK_par.freqmax == self.freqmax)\
            .filter(self.FK_par.freqmin == self.freqmin)\
            .filter(self.FK_par.beamwinlen == self.beamwinlen)\
            .filter(self.FK_par.beamwinstep == self.beamwinstep)\
            .filter(self.FK_par.backazmin == self.backazmin)\
            .filter(self.FK_par.backazmax == self.backazmax)\
            .filter(self.FK_par.backazstep == self.backazstep)\
            .filter(self.FK_par.trvelmin == self.trvelmin)\
            .filter(self.FK_par.trvelmax == self.trvelmax)\
            .filter(self.FK_par.trvelstep == self.trvelstep)\
            .filter(self.FK_par.name == self.name)\
            .filter(self.FK_par.minslowness == self.minslowness)\
            .filter(self.FK_par.maxslowness == self.maxslowness)\
            .filter(self.FK_par.stepslowness == self.stepslowness)\
            .filter(self.FK_par.numsources == self.numsources)\
            .filter(self.FK_par.domain == self.domain)\
            .filter(self.FK_par.algorithm == self.algorithm)\
            .all()
        if len(self.FK_parQuery) > 1:
            print('issue with the database too many parameters entries, there should be just one')
            embed()
        if len(self.FK_parQuery) == 1:
            self.FK_parQuery = self.FK_parQuery[0]
    except Exception as ex1:
        print("issue with the table,", ex1)
        embed()
        self.FK_parQuery = []

    if not self.FK_parQuery:
        # No matching parameter set: insert one with the next available pfkid.
        new_row = self.session.query(self.FK_par).count()
        print('New process parameters, write process to fk_params table pfkid=', new_row)
        res = self.FK_par(freqmax=self.freqmax,
                          freqmin=self.freqmin,
                          beamwinlen=self.beamwinlen,
                          beamwinstep=self.beamwinstep,
                          backazmin=self.backazmin,
                          backazmax=self.backazmax,
                          backazstep=self.backazstep,
                          trvelmin=self.trvelmin,
                          trvelmax=self.trvelmax,
                          trvelstep=self.trvelstep,
                          minslowness=self.minslowness,
                          maxslowness=self.maxslowness,
                          stepslowness=self.stepslowness,
                          name=self.name,
                          numsources=self.numsources,
                          domain=self.domain,
                          algorithm=self.algorithm,
                          pfkid=new_row)
        self.session.add(res)
        try:
            self.session.commit()
        except Exception as ex1:
            print(ex1, ", there is a problem with these parameters")
            embed()
        # Re-query so FK_parQuery holds the freshly committed single row.
        self.FK_parQuery = self.session.query(self.FK_par)\
            .filter(self.FK_par.freqmax == self.freqmax)\
            .filter(self.FK_par.freqmin == self.freqmin)\
            .filter(self.FK_par.beamwinlen == self.beamwinlen)\
            .filter(self.FK_par.beamwinstep == self.beamwinstep)\
            .filter(self.FK_par.backazmin == self.backazmin)\
            .filter(self.FK_par.backazmax == self.backazmax)\
            .filter(self.FK_par.backazstep == self.backazstep)\
            .filter(self.FK_par.trvelmin == self.trvelmin)\
            .filter(self.FK_par.trvelmax == self.trvelmax)\
            .filter(self.FK_par.trvelstep == self.trvelstep)\
            .filter(self.FK_par.minslowness == self.minslowness)\
            .filter(self.FK_par.maxslowness == self.maxslowness)\
            .filter(self.FK_par.stepslowness == self.stepslowness)\
            .filter(self.FK_par.name == self.name)\
            .filter(self.FK_par.numsources == self.numsources)\
            .filter(self.FK_par.domain == self.domain)\
            .filter(self.FK_par.algorithm == self.algorithm)\
            .one()
    else:
        print('process already in fk_params table, pfkid:', self.FK_parQuery.pfkid)
    self.db_connected = True
    return self.db_connected
def database_connecting(self):
    """Connect to the database and register detection (FD) parameters.

    Loads the standard tables, declares the FK/FD result and parameter ORM
    classes (guarded so a repeated call does not abort on re-declaration),
    creates the FD tables if absent, then looks up — or inserts — the
    FD_params row matching the current detection parameters.

    :return: True on success, False if table creation fails
    :rtype: bool
    """
    session, tables = load_config(self.db_PARAM)
    self.session = session
    self.Site = tables['site']
    self.Wfdisc = tables['wfdisc']

    # Each class declaration is guarded: re-declaring a mapped class with the
    # same table name raises in SQLAlchemy, so a second call just reports it.
    try:
        class Fk_results(schema.fk_results):
            __tablename__ = self.fkresults
    except Exception as Ex1:
        print('fk_res table already defined')
    try:
        class Fk_params(schema.fk_params):
            __tablename__ = 'FK_PARAMS'
    except Exception as Ex1:
        print('FK_PARAMS table already defined')
    try:
        class Fd_results(schema.fd_results):
            __tablename__ = self.fdresults
    except Exception as Ex1:
        print('fd_res table already defined')
    try:
        class Fd_params(schema.fd_params):
            __tablename__ = 'FD_PARAMS'
    except Exception as Ex1:
        print('FD_PARAMS table already defined')

    try:
        self.FK_par = Fk_params
        self.FK_results = Fk_results
        self.FD_par = Fd_params
        self.FD_par.__table__.create(self.session.bind, checkfirst=True)
        self.FD_results = Fd_results
        self.FD_results.__table__.create(self.session.bind, checkfirst=True)
    except Exception as Ex1:
        print(Ex1)
        import warnings
        warnings.filterwarnings("ignore")
        print("table creation issue line 653")
        embed()
        self.db_connected = False
        return self.db_connected

    try:
        self.PFDetect_Q = self.session.query(self.FD_par)\
            .filter(self.FD_par.detwinlen == self.detwinlen)\
            .filter(self.FD_par.pthreshold == self.pthreshold)\
            .filter(self.FD_par.cthr == self.cthr)\
            .filter(self.FD_par.minlen == self.minlen)\
            .all()
        if len(self.PFDetect_Q) > 1:
            print('issue with the database too many parameters entries, there should be just one')
            embed()
        if len(self.PFDetect_Q) == 1:
            self.PFDetect_Q = self.PFDetect_Q[0]
    except Exception as ex1:
        print("issue with the table", ex1)
        self.PFDetect_Q = []

    if not self.PFDetect_Q:
        print('New process parameters, write process to INFRA_DETECT_PARAM table')
        new_row = self.session.query(self.FD_par).count()
        res = self.FD_par(detwinlen=self.detwinlen,
                          pthreshold=self.pthreshold,
                          cthr=self.cthr,
                          minlen=self.minlen,
                          pfdid=new_row)
        self.session.add(res)
        self.session.commit()
        self.PFDetect_Q = self.session.query(
            self.FD_par).filter(self.FD_par.pfdid == new_row).one()
    else:
        print('process already in table: INFRA_DETECT_PARAM table')
    # BUG FIX: was `print(self.PFDetect_Q[:])` — PFDetect_Q is a single mapped
    # row here (from .one() or unpacked .all()), so slicing raised TypeError.
    print(self.PFDetect_Q)
    self.db_connected = True
    return self.db_connected
# Export waveforms for one reference station over a time window as SAC files.
# Usage: script.py STA TIMEINI TIMEEND [CHAN]   (CHAN defaults to 'BDF')
STA = sys.argv[1]
timeini = sys.argv[2]
timeend = sys.argv[3]
try:
    chan = sys.argv[4]
except IndexError:
    chan = 'BDF'

ttI = UTCDateTime(timeini)
ttE = UTCDateTime(timeend)
# CSS3.0 ondate/offdate are julian dates (yyyyddd).
# BUG FIX: original used .day (day of month); the julian day of year is .julday.
jdayI = int(ttI.year * 1000 + ttI.julday)
jdayE = int(ttE.year * 1000 + ttE.julday)

config = configparser.ConfigParser()
config.read('gnem.cfg')
session, tables = load_config(config['database'])
Site = tables['site']
Wfdisc_raw = tables['wfdisc']

print('from ', UTCDateTime(timeini), 'to ', UTCDateTime(timeend))
# All array elements of the reference station active across the window.
si_res = session.query(Site).filter(Site.refsta == STA)\
    .filter(Site.ondate <= jdayI).filter(Site.offdate >= jdayE).all()
for si in si_res:
    wfres = session.query(Wfdisc_raw)\
        .filter(Wfdisc_raw.sta == si.sta)\
        .filter(Wfdisc_raw.time > float(ttI))\
        .filter(Wfdisc_raw.time < float(ttE))\
        .filter(Wfdisc_raw.chan == chan).all()
    print('working on:', si.sta)
    for wf in wfres:
        trace = wf.to_trace()
        # Attach station coordinates so they are written into the SAC header.
        trace.stats.sac = AttribDict({'stla': si.lat, 'stlo': si.lon, 'stel': si.elev})
        trace.write(STA + str(wf.wfid) + '.sac', format='SAC')
def test_load_config_dict():
    """A plain dict with a 'url' key should also be accepted by load_config."""
    db_settings = {'url': 'sqlite:///mydb.sqlite'}
    session, tables = util.load_config(db_settings)
    assert str(session.bind.url) == 'sqlite:///mydb.sqlite'
    assert tables == {}
def database_connecting(self):
    """Connect to the database, register association parameters, and gather detections.

    Loads the standard CSS tables, declares the infrapy FK/FD/ASSOC ORM
    classes, creates the ASSOC tables if absent, looks up — or inserts —
    the ASSOC_params row for the current parameters, then collects the
    detection tables and builds the fk_results mappings referenced by the
    detections for each reference station in the network.

    :return: True once the connection and parameter lookup succeed
    :rtype: bool
    """
    print('connecting')
    session, tables = load_config(self.db_PARAM)
    self.session = session
    self.Site = tables['site']
    self.Wfdisc = tables['wfdisc']
    self.Affiliation = tables['affiliation']

    class FK_params(schema.fk_params):
        __tablename__ = 'FK_params'

    class FD_params(schema.fd_params):
        __tablename__ = 'FD_params'

    self.dict_namefk = {}
    # One fd_results mapping per configured detection-results table name.
    self.fdtables = []
    for fdi in self.fdtables_names:
        self.fdtables.append(
            type(str(fdi), (schema.fd_results,), {'__tablename__': str(fdi)}))

    class ASSOC_params(schema.ASSOC_params):
        __tablename__ = 'ASSOC_params'

    class ASSOC_results(schema.ASSOC_results):
        __tablename__ = self.resultstable

    self.FK_par = FK_params
    self.FD_par = FD_params
    self.ASSOC_par = ASSOC_params
    self.ASSOC_results = ASSOC_results
    self.ASSOC_par.__table__.create(self.session.bind, checkfirst=True)
    self.ASSOC_results.__table__.create(self.session.bind, checkfirst=True)

    try:
        self.Passoc_Q = self.session.query(self.ASSOC_par)\
            .filter(self.ASSOC_par.beamwidth == self.beamwidth)\
            .filter(self.ASSOC_par.rangemax == self.rangemax)\
            .filter(self.ASSOC_par.clusterthresh == self.clusterthresh)\
            .filter(self.ASSOC_par.trimthresh == self.trimthresh)\
            .filter(self.ASSOC_par.trimthreshscalar == self.trimthreshscalar)\
            .filter(self.ASSOC_par.mindetpop == self.mindetpop)\
            .filter(self.ASSOC_par.minarraypop == self.minarraypop)\
            .filter(self.ASSOC_par.duration == self.duration)\
            .all()
        if len(self.Passoc_Q) > 1:
            print('issue with the database too many parameters entries, there should be just one')
            embed()
        if len(self.Passoc_Q) == 1:
            self.Passoc_Q = self.Passoc_Q[0]
    except Exception as x1:
        print("issue with the table or first assoc entered")
        print(x1)
        embed()
        self.Passoc_Q = []
    # BUG FIX: was `print(Passoc_Q)` (bare name) — raised NameError.
    print(self.Passoc_Q)

    if not self.Passoc_Q:
        print('New process parameters, write process to INFRA_ASSOC_PARAM table')
        new_row = self.session.query(self.ASSOC_par).count()
        try:
            res = self.ASSOC_par(beamwidth=self.beamwidth,
                                 rangemax=self.rangemax,
                                 clusterthresh=self.clusterthresh,
                                 trimthresh=self.trimthresh,
                                 trimthreshscalar=self.trimthreshscalar,
                                 mindetpop=self.mindetpop,
                                 minarraypop=self.minarraypop,
                                 algorithm=self.algorithm,
                                 duration=self.duration,
                                 passocid=new_row)
        except Exception as x1:
            print('problem writing to the assoc param file')
            print("Unexpected error:", x1)
            embed()
        self.session.add(res)
        self.session.commit()
        # Re-query so Passoc_Q holds the freshly committed single row.
        self.Passoc_Q = self.session.query(self.ASSOC_par)\
            .filter(self.ASSOC_par.beamwidth == self.beamwidth)\
            .filter(self.ASSOC_par.rangemax == self.rangemax)\
            .filter(self.ASSOC_par.clusterthresh == self.clusterthresh)\
            .filter(self.ASSOC_par.trimthresh == self.trimthresh)\
            .filter(self.ASSOC_par.trimthreshscalar == self.trimthreshscalar)\
            .filter(self.ASSOC_par.mindetpop == self.mindetpop)\
            .filter(self.ASSOC_par.minarraypop == self.minarraypop)\
            .filter(self.ASSOC_par.duration == self.duration)\
            .one()
        self.passocid = self.Passoc_Q.passocid
    else:
        print('process already in table: Assoc params table')
        self.passocid = self.Passoc_Q.passocid
        print(self.Passoc_Q)
    self.db_connected = True

    # Retrieve every station affiliated with the configured network.
    try:
        self.Affiliation_Q = self.session.query(self.Affiliation)\
            .filter(self.Affiliation.net == self.net).all()
    except Exception as ex1:
        print('Error with network retrieving', ex1)
        exit(0)

    refSTA = []
    for aai in self.Affiliation_Q:
        try:
            STA_dataM = self.session.query(self.Site)\
                .filter(self.Site.sta == aai.sta).one()
        except Exception as ex1:
            print('there is more than just one station:', aai.sta, ' ', ex1)
            embed()
            exit()
        refSTA.append(STA_dataM.refsta)

    # Collapse array elements into one entry per reference station with the
    # mean coordinates of its elements.
    refstations_l = list(set(refSTA))
    refsta = []
    for aai in refstations_l:
        STA_dataM = self.session.query(self.Site)\
            .filter(self.Site.refsta == str(aai)).all()
        array_lo = []
        array_la = []
        array_el = []
        for sta_i in STA_dataM:
            array_la.append(sta_i.lat)
            array_lo.append(sta_i.lon)
            array_el.append(sta_i.elev)
        array_la = np.asarray(array_la)
        array_lo = np.asarray(array_lo)
        array_el = np.asarray(array_el)
        refsta.append({'lon': np.mean(array_lo),
                       'lat': np.mean(array_la),
                       'elev': np.mean(array_el),
                       'name': aai,
                       'numsta': len(array_la)})

    self.det_tot = []
    self.fdtable_name = []
    self.fktables_names = []
    for aai in refsta:
        # Look for detections for this array in every configured fd table,
        # collecting the names of the fk tables the detections reference.
        for ti in range(self.num_tables):
            try:
                fd_res = self.session.query(self.fdtables[ti])\
                    .filter(self.fdtables[ti].sta == aai['name'])\
                    .filter(self.fdtables[ti].pfdid == self.pfdid)\
                    .filter(self.fdtables[ti].pfkid == self.pfkid).all()
                times_ini = self.session.query(self.fdtables[ti].timeini)\
                    .filter(self.fdtables[ti].sta == aai['name'])\
                    .filter(self.fdtables[ti].pfdid == self.pfdid)\
                    .filter(self.fdtables[ti].pfkid == self.pfkid).all()
                fk_table_names = self.session.query(self.fdtables[ti].fktablename)\
                    .filter(self.fdtables[ti].sta == aai['name'])\
                    .filter(self.fdtables[ti].pfdid == self.pfdid)\
                    .filter(self.fdtables[ti].pfkid == self.pfkid).all()
                if len(fk_table_names) > 0:
                    for tnfk in fk_table_names:
                        self.fktables_names.append(tnfk[0])
            except Exception as x1:
                print('There is an error', x1)
                embed()
                exit()

    # Build one fk_results mapping per distinct referenced fk table name.
    tt = np.unique(self.fktables_names)
    self.fktables = []
    for fki in tt:
        self.fktables.append(
            type(str(fki), (schema.fk_results,), {'__tablename__': str(fki)}))
    return self.db_connected